Skip to content

Refactor tokenizer test and add to cmake #8450

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Feb 15, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -186,6 +186,10 @@ option(EXECUTORCH_BUILD_EXTENSION_FLAT_TENSOR "Build the Flat Tensor extension"
OFF
)

option(EXECUTORCH_BUILD_EXTENSION_LLM "Build the LLM extension"
OFF
)

option(EXECUTORCH_BUILD_EXTENSION_MODULE "Build the Module extension" OFF)

option(EXECUTORCH_BUILD_EXTENSION_RUNNER_UTIL "Build the Runner Util extension"
Expand Down Expand Up @@ -718,6 +722,10 @@ if(EXECUTORCH_BUILD_EXTENSION_FLAT_TENSOR)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/extension/flat_tensor/serialize)
endif()

if(EXECUTORCH_BUILD_EXTENSION_LLM)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/extension/llm/tokenizer)
endif()

if(EXECUTORCH_BUILD_EXTENSION_MODULE)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/extension/module)
endif()
Expand Down
3 changes: 3 additions & 0 deletions build/Utils.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,9 @@ function(executorch_print_configuration_summary)
message(STATUS " EXECUTORCH_BUILD_EXTENSION_FLAT_TENSOR : "
"${EXECUTORCH_BUILD_EXTENSION_FLAT_TENSOR}"
)
message(STATUS " EXECUTORCH_BUILD_EXTENSION_LLM : "
"${EXECUTORCH_BUILD_EXTENSION_LLM}"
)
message(STATUS " EXECUTORCH_BUILD_EXTENSION_MODULE : "
"${EXECUTORCH_BUILD_EXTENSION_MODULE}"
)
Expand Down
16 changes: 16 additions & 0 deletions build/cmake_deps.toml
Original file line number Diff line number Diff line change
Expand Up @@ -402,6 +402,22 @@ deps = [
"xnnpack_backend",
]

# Maps the CMake `extension_llm_tokenizer` library to its Buck source targets
# (BPE tokenizer + Tiktoken); only .cpp files are collected, codegen excluded.
[targets.extension_llm_tokenizer]
buck_targets = [
  "//extension/llm/tokenizer:bpe_tokenizer",
  "//extension/llm/tokenizer:tiktoken",
]
filters = [
  ".cpp$",
]
excludes = [
  "^codegen",
]
deps = [
  "executorch",
  "executorch_core",
]

[targets.llama_runner]
buck_targets = [
"//examples/models/llama/runner:runner",
Expand Down
61 changes: 61 additions & 0 deletions extension/llm/tokenizer/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# Please keep this file formatted by running:
# ~~~
# cmake-format -i CMakeLists.txt
# ~~~

cmake_minimum_required(VERSION 3.19)

# Source root directory for executorch.
if(NOT EXECUTORCH_ROOT)
  set(EXECUTORCH_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../../..)
endif()

# Build the vendored abseil-cpp and re2 dependencies with position-independent
# code so they can be folded into shared libraries; the previous
# CMAKE_POSITION_INDEPENDENT_CODE value is saved and restored so the rest of
# the build is unaffected.
set(ABSL_ENABLE_INSTALL ON)
set(ABSL_PROPAGATE_CXX_STD ON)
set(_pic_flag ${CMAKE_POSITION_INDEPENDENT_CODE})
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
add_subdirectory(
  ${CMAKE_CURRENT_SOURCE_DIR}/../third-party/abseil-cpp
  ${CMAKE_CURRENT_BINARY_DIR}/abseil-cpp
)
add_subdirectory(
  ${CMAKE_CURRENT_SOURCE_DIR}/../third-party/re2
  ${CMAKE_CURRENT_BINARY_DIR}/re2
)
set(CMAKE_POSITION_INDEPENDENT_CODE ${_pic_flag})

# _extension_llm_tokenizer__srcs is populated by the parent build (from
# build/cmake_deps.toml) with paths relative to EXECUTORCH_ROOT.
list(TRANSFORM _extension_llm_tokenizer__srcs PREPEND "${EXECUTORCH_ROOT}/")
add_library(extension_llm_tokenizer ${_extension_llm_tokenizer__srcs})
target_include_directories(
  extension_llm_tokenizer PUBLIC ${EXECUTORCH_ROOT}/..
                                 ${_common_include_directories}
)

# Explicit visibility keyword: the keyword-less target_link_libraries
# signature has legacy semantics. PUBLIC preserves the old transitive-link
# behavior so consumers of the tokenizer also see re2.
target_link_libraries(extension_llm_tokenizer PUBLIC re2::re2)
target_compile_options(
  extension_llm_tokenizer PUBLIC ${_common_compile_options}
)

# Install libraries
install(
  TARGETS extension_llm_tokenizer
  DESTINATION lib
  INCLUDES
  DESTINATION ${_common_include_directories}
)

target_include_directories(
  extension_llm_tokenizer
  PRIVATE ${CMAKE_INSTALL_PREFIX}/include
          ${CMAKE_CURRENT_SOURCE_DIR}/../third-party/abseil-cpp
)

if(BUILD_TESTING)
  add_subdirectory(test)
endif()
35 changes: 7 additions & 28 deletions extension/llm/tokenizer/test/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -12,39 +12,18 @@
#

cmake_minimum_required(VERSION 3.19)
project(tokenizer_test)

# Use C++17 for test.
set(CMAKE_CXX_STANDARD 17)

set(EXECUTORCH_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../../../..)

include(${EXECUTORCH_ROOT}/build/Test.cmake)

set(_tokenizer_test_srcs
test_tiktoken.cpp test_bpe_tokenizer.cpp
${CMAKE_CURRENT_SOURCE_DIR}/../tiktoken.cpp
${CMAKE_CURRENT_SOURCE_DIR}/../bpe_tokenizer.cpp
)
set(test_env "RESOURCES_PATH=${EXECUTORCH_ROOT}/extension/llm/tokenizer/test/resources")

set(ENV{RESOURCES_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/resources)
set(ABSL_ENABLE_INSTALL ON)
set(ABSL_PROPAGATE_CXX_STD ON)
set(_pic_flag ${CMAKE_POSITION_INDEPENDENT_CODE})
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
add_subdirectory(
${CMAKE_CURRENT_SOURCE_DIR}/../../third-party/abseil-cpp
${CMAKE_CURRENT_BINARY_DIR}/abseil-cpp
)
add_subdirectory(
${CMAKE_CURRENT_SOURCE_DIR}/../../third-party/re2
${CMAKE_CURRENT_BINARY_DIR}/re2
)
set(CMAKE_POSITION_INDEPENDENT_CODE ${_pic_flag})
set(_test_srcs test_bpe_tokenizer.cpp test_tiktoken.cpp)

et_cxx_test(tokenizer_test SOURCES ${_tokenizer_test_srcs} EXTRA_LIBS re2::re2)
target_include_directories(
tokenizer_test
PRIVATE ${CMAKE_INSTALL_PREFIX}/include
${CMAKE_CURRENT_SOURCE_DIR}/../../third-party/abseil-cpp
et_cxx_test(
extension_llm_tokenizer_test SOURCES ${_test_srcs} EXTRA_LIBS
extension_llm_tokenizer
)

set_property(TEST extension_llm_tokenizer_test PROPERTY ENVIRONMENT ${test_env})
7 changes: 7 additions & 0 deletions extension/llm/tokenizer/test/targets.bzl
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
load(
"@fbsource//tools/build_defs:default_platform_defs.bzl",
"ANDROID",
"CXX",
)
load("@fbsource//xplat/executorch/build:runtime_wrapper.bzl", "runtime")

def define_common_targets():
Expand Down Expand Up @@ -28,6 +33,7 @@ def define_common_targets():
env = {
"RESOURCES_PATH": "$(location :resources)/resources",
},
platforms = [CXX, ANDROID], # Cannot bundle resources on Apple platform.
)

runtime.cxx_test(
Expand All @@ -41,6 +47,7 @@ def define_common_targets():
env = {
"RESOURCES_PATH": "$(location :resources)/resources",
},
platforms = [CXX, ANDROID], # Cannot bundle resources on Apple platform.
external_deps = [
"re2",
],
Expand Down
13 changes: 2 additions & 11 deletions extension/llm/tokenizer/test/test_bpe_tokenizer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,9 @@
* LICENSE file in the root directory of this source tree.
*/

#ifdef EXECUTORCH_FB_BUCK
#include <TestResourceUtils/TestResourceUtils.h>
#endif
#include <executorch/extension/llm/tokenizer/bpe_tokenizer.h>
#include <executorch/runtime/platform/runtime.h>
#include <gtest/gtest.h>
#include <vector>

using namespace ::testing;

Expand All @@ -26,13 +22,8 @@ class TokenizerExtensionTest : public Test {
void SetUp() override {
executorch::runtime::runtime_init();
tokenizer_ = std::make_unique<BPETokenizer>();
#ifdef EXECUTORCH_FB_BUCK
modelPath_ = facebook::xplat::testing::getPathForTestResource(
"resources/test_bpe_tokenizer.bin");
#else
modelPath_ =
std::getenv("RESOURCES_PATH") + std::string("/test_bpe_tokenizer.bin");
#endif
}

std::unique_ptr<Tokenizer> tokenizer_;
Expand All @@ -50,15 +41,15 @@ TEST_F(TokenizerExtensionTest, DecodeWithoutLoadFails) {
}

TEST_F(TokenizerExtensionTest, DecodeOutOfRangeFails) {
Error res = tokenizer_->load(modelPath_.c_str());
Error res = tokenizer_->load(modelPath_);
EXPECT_EQ(res, Error::Ok);
auto result = tokenizer_->decode(0, 64000);
// The vocab size is 32000, and token 64000 is out of vocab range.
EXPECT_EQ(result.error(), Error::NotSupported);
}

TEST_F(TokenizerExtensionTest, TokenizerMetadataIsExpected) {
Error res = tokenizer_->load(modelPath_.c_str());
Error res = tokenizer_->load(modelPath_);
EXPECT_EQ(res, Error::Ok);
// test_bpe_tokenizer.bin has vocab_size 0, bos_id 0, eos_id 0 recorded.
EXPECT_EQ(tokenizer_->vocab_size(), 0);
Expand Down
46 changes: 18 additions & 28 deletions extension/llm/tokenizer/test/test_tiktoken.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,10 @@
* LICENSE file in the root directory of this source tree.
*/

#ifdef EXECUTORCH_FB_BUCK
#include <TestResourceUtils/TestResourceUtils.h>
#endif
#include <executorch/extension/llm/tokenizer/tiktoken.h>
#include <executorch/runtime/platform/runtime.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <vector>

using namespace ::testing;
using ::executorch::extension::llm::Tiktoken;
Expand Down Expand Up @@ -49,15 +45,6 @@ static inline std::unique_ptr<std::vector<std::string>> _get_special_tokens() {
}
return special_tokens;
}

static inline std::string _get_resource_path(const std::string& name) {
#ifdef EXECUTORCH_FB_BUCK
return facebook::xplat::testing::getPathForTestResource("resources/" + name);
#else
return std::getenv("RESOURCES_PATH") + std::string("/") + name;
#endif
}

} // namespace

class TiktokenExtensionTest : public Test {
Expand All @@ -66,7 +53,8 @@ class TiktokenExtensionTest : public Test {
executorch::runtime::runtime_init();
tokenizer_ = std::make_unique<Tiktoken>(
_get_special_tokens(), kBOSTokenIndex, kEOSTokenIndex);
modelPath_ = _get_resource_path("test_tiktoken_tokenizer.model");
modelPath_ = std::getenv("RESOURCES_PATH") +
std::string("/test_tiktoken_tokenizer.model");
}

std::unique_ptr<Tokenizer> tokenizer_;
Expand All @@ -84,15 +72,15 @@ TEST_F(TiktokenExtensionTest, DecodeWithoutLoadFails) {
}

TEST_F(TiktokenExtensionTest, TokenizerVocabSizeIsExpected) {
Error res = tokenizer_->load(modelPath_.c_str());
Error res = tokenizer_->load(modelPath_);
EXPECT_EQ(res, Error::Ok);
EXPECT_EQ(tokenizer_->vocab_size(), 128256);
EXPECT_EQ(tokenizer_->bos_tok(), 128000);
EXPECT_EQ(tokenizer_->eos_tok(), 128001);
}

TEST_F(TiktokenExtensionTest, TokenizerEncodeCorrectly) {
Error res = tokenizer_->load(modelPath_.c_str());
Error res = tokenizer_->load(modelPath_);
EXPECT_EQ(res, Error::Ok);
Result<std::vector<uint64_t>> out = tokenizer_->encode("hello world", 1, 0);
EXPECT_EQ(out.error(), Error::Ok);
Expand All @@ -103,7 +91,7 @@ TEST_F(TiktokenExtensionTest, TokenizerEncodeCorrectly) {
}

TEST_F(TiktokenExtensionTest, TokenizerDecodeCorrectly) {
Error res = tokenizer_->load(modelPath_.c_str());
Error res = tokenizer_->load(modelPath_);
EXPECT_EQ(res, Error::Ok);
std::vector<std::string> expected = {"<|begin_of_text|>", "hello", " world"};
std::vector<uint64_t> tokens = {128000, 15339, 1917};
Expand All @@ -115,7 +103,7 @@ TEST_F(TiktokenExtensionTest, TokenizerDecodeCorrectly) {
}

TEST_F(TiktokenExtensionTest, TokenizerDecodeOutOfRangeFails) {
Error res = tokenizer_->load(modelPath_.c_str());
Error res = tokenizer_->load(modelPath_);
EXPECT_EQ(res, Error::Ok);
// The vocab size is 128256; 256 is added just so the token is out of vocab
// range.
Expand Down Expand Up @@ -160,31 +148,33 @@ TEST_F(TiktokenExtensionTest, LoadWithInvalidPath) {
}

TEST_F(TiktokenExtensionTest, LoadTiktokenFileWithInvalidRank) {
auto invalidModelPath =
_get_resource_path("test_tiktoken_invalid_rank.model");
Error res = tokenizer_->load(invalidModelPath.c_str());
auto invalidModelPath = std::getenv("RESOURCES_PATH") +
std::string("/test_tiktoken_invalid_rank.model");
Error res = tokenizer_->load(invalidModelPath);

EXPECT_EQ(res, Error::InvalidArgument);
}

TEST_F(TiktokenExtensionTest, LoadTiktokenFileWithInvalidBase64) {
auto invalidModelPath =
_get_resource_path("test_tiktoken_invalid_base64.model");
Error res = tokenizer_->load(invalidModelPath.c_str());
auto invalidModelPath = std::getenv("RESOURCES_PATH") +
std::string("/test_tiktoken_invalid_base64.model");
Error res = tokenizer_->load(invalidModelPath);

EXPECT_EQ(res, Error::InvalidArgument);
}

TEST_F(TiktokenExtensionTest, LoadTiktokenFileWithNoSpace) {
auto invalidModelPath = _get_resource_path("test_tiktoken_no_space.model");
Error res = tokenizer_->load(invalidModelPath.c_str());
auto invalidModelPath = std::getenv("RESOURCES_PATH") +
std::string("/test_tiktoken_no_space.model");
Error res = tokenizer_->load(invalidModelPath);

EXPECT_EQ(res, Error::InvalidArgument);
}

TEST_F(TiktokenExtensionTest, LoadTiktokenFileWithBPEFile) {
auto invalidModelPath = _get_resource_path("test_bpe_tokenizer.bin");
Error res = tokenizer_->load(invalidModelPath.c_str());
auto invalidModelPath =
std::getenv("RESOURCES_PATH") + std::string("/test_bpe_tokenizer.bin");
Error res = tokenizer_->load(invalidModelPath);

EXPECT_EQ(res, Error::InvalidArgument);
}
2 changes: 0 additions & 2 deletions test/run_oss_cpp_tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -57,8 +57,6 @@ build_and_run_test() {

if [[ "$test_dir" =~ .*examples/models/llama/tokenizer.* ]]; then
RESOURCES_PATH=$(realpath examples/models/llama/tokenizer/test/resources)
elif [[ "$test_dir" =~ .*extension/llm/tokenizer.* ]]; then
RESOURCES_PATH=$(realpath extension/llm/tokenizer/test/resources)
fi
export RESOURCES_PATH

Expand Down
Loading