t5 : add example for text-to-text transfer transformer inference

pull/12/head
Georgi Gerganov 1 year ago
parent bd9f710a45
commit ed683187cb

@ -4,3 +4,4 @@ target_include_directories(ggml_utils PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
add_subdirectory(gpt-2)
add_subdirectory(gpt-j)
add_subdirectory(whisper)
add_subdirectory(t5)

@ -0,0 +1,6 @@
#
# t5

set(TEST_TARGET t5)
add_executable(${TEST_TARGET} main.cpp)
target_link_libraries(${TEST_TARGET} PRIVATE ggml ggml_utils)

@ -0,0 +1,3 @@
# t5
ref: https://github.com/huggingface/transformers/blob/main/src/transformers/models/t5/modeling_t5.py
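For context, the behaviour the new example aims to reproduce can be checked against the referenced Hugging Face implementation. A minimal reference run might look like the sketch below; the "google/flan-t5-small" checkpoint name and the prompt are illustrative assumptions, not part of this commit:

# Reference text-to-text inference via transformers (sketch; checkpoint name is an assumption)
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-small")
model = AutoModelForSeq2SeqLM.from_pretrained("google/flan-t5-small")

inputs = tokenizer("translate English to German: The house is wonderful.", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

A run like this provides a ground-truth output to compare the ggml port against.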

@ -0,0 +1,25 @@
import io
import sys
import torch
import code

from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

if len(sys.argv) < 3:
    print("Usage: convert-flan-t5-pt-to-ggml.py path-to-pt-model dir-output [use-f32]\n")
    sys.exit(1)

fname_inp = sys.argv[1] + "/pytorch_model.bin"

try:
    model_bytes = open(fname_inp, "rb").read()
    with io.BytesIO(model_bytes) as fp:
        checkpoint = torch.load(fp, map_location="cpu")
except Exception:
    print("Error: failed to load PyTorch model file: %s" % fname_inp)
    sys.exit(1)

# list all tensor names in the checkpoint
for k in checkpoint.keys():
    print(k)
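The script currently stops at listing the checkpoint keys. A plausible follow-up step, sketched below, would serialize those tensors into a single binary the C++ side can read; the header layout here is modelled on the other converters in this repository (magic number, then per-tensor dims/name/data) and is an assumption, not a format defined by this commit:

import struct
import numpy as np

def write_ggml(checkpoint, fname_out):
    # Sketch only: field order mirrors the gpt-2/whisper converters and may not
    # match whatever format the finished t5 example settles on.
    with open(fname_out, "wb") as fout:
        fout.write(struct.pack("i", 0x67676d6c))  # magic ("ggml" in hex)
        for name, tensor in checkpoint.items():
            data = tensor.float().numpy()
            dims = data.shape[::-1]               # ggml lists dims innermost-first
            fout.write(struct.pack("ii", len(dims), len(name)))
            for d in dims:
                fout.write(struct.pack("i", d))
            fout.write(name.encode("utf-8"))
            data.astype(np.float32).tofile(fout)  # raw fp32 payload

The stub main.cpp would then parse the same header back when loading the model.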

@ -0,0 +1,3 @@
int main() {
    return 0;
}