# update models for newer trt 0.6.1 version (commit dd20dba)
# Copyright 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NVIDIA CORPORATION nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Triton model configuration for the "postprocessing" step of a TensorRT-LLM
# ensemble: a Python-backend model that detokenizes generated token IDs into
# text (actual logic lives in the accompanying model.py, not visible here).
name: "postprocessing"
backend: "python"
# Maximum number of requests Triton may batch together for one execution.
max_batch_size: 128
# Inputs (all dims are per-request; -1 means variable-length).
input [
{
    # Generated token IDs, shape [beam_width, seq_len].
    name: "TOKENS_BATCH"
    data_type: TYPE_INT32
    dims: [ -1, -1 ]
},
{
    # Number of valid tokens per beam in TOKENS_BATCH.
    name: "SEQUENCE_LENGTH"
    data_type: TYPE_INT32
    dims: [ -1 ]
},
{
    # Cumulative log probability per beam (passed through to the output).
    name: "CUM_LOG_PROBS"
    data_type: TYPE_FP32
    dims: [ -1 ]
},
{
    # Per-token log probabilities, shape [beam_width, seq_len]
    # (passed through to the output).
    name: "OUTPUT_LOG_PROBS"
    data_type: TYPE_FP32
    dims: [ -1, -1 ]
}
]
# Outputs.
output [
{
    # Detokenized text, one string per beam.
    name: "OUTPUT"
    data_type: TYPE_STRING
    dims: [ -1 ]
},
{
    # Pass-through of CUM_LOG_PROBS.
    name: "OUT_CUM_LOG_PROBS"
    data_type: TYPE_FP32
    dims: [ -1 ]
},
{
    # Pass-through of OUTPUT_LOG_PROBS.
    name: "OUT_OUTPUT_LOG_PROBS"
    data_type: TYPE_FP32
    dims: [ -1, -1 ]
},
{
    # Length of each detokenized output sequence.
    name: "OUTPUT_LENS"
    data_type: TYPE_INT32
    dims: [ -1 ]
}
]
# User-defined parameters read by model.py via the model config.
parameters {
  # Local path to the HuggingFace tokenizer files.
  # NOTE(review): hard-coded absolute path — must match the deployment host.
  key: "tokenizer_dir"
  value: {
    string_value: "/data/llama/Llama-2-70b-chat-hf/"
  }
}
parameters {
  # Tokenizer family selector; presumably switches tokenizer loading logic
  # in model.py — verify against the backend implementation.
  key: "tokenizer_type"
  value: {
    string_value: "llama"
  }
}
parameters {
  # Whether special tokens (BOS/EOS/pad) are stripped during decoding;
  # stored as a string because parameter values are strings in pbtxt.
  key: "skip_special_tokens"
  value: {
    string_value: "True"
  }
}
# Run 4 CPU instances of this model to parallelize detokenization.
instance_group [
{
    count: 4
    kind: KIND_CPU
}
]