{
  "model": {
    "path": "TheBloke/Llama-2-7B-Chat-GGML",
    "type": "llama",
    "max_new_tokens": 512,
    "temperature": 0.5
  },
  "tokenizer": {
    "type": "AutoTokenizer",
    "name": "sentence-transformers/all-MiniLM-L6-v2",
    "kwargs": {
      "device": "cpu"
    }
  },
  "vector_store": {
    "path": "vectorstores/db_faiss",
    "embeddings": {
      "type": "HuggingFaceEmbeddings",
      "name": "sentence-transformers/all-MiniLM-L6-v2",
      "kwargs": {
        "device": "cpu"
      }
    }
  },
  "retriever": {
    "type": "FAISS",
    "path": "vectorstores/db_faiss"
  },
  "chain": {
    "type": "RetrievalQA",
    "chain_type": "stuff",
    "chain_type_kwargs": {
      "prompt": {
        "template": "Use the following pieces of information...",
        "input_variables": ["context", "question"]
      }
    }
  },
  "api": {
    "port": 5000,
    "num_threads": 8
  }
}
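The "type" fields in this config (llama/GGML model, HuggingFaceEmbeddings, FAISS, RetrievalQA with a "stuff" chain) point at a LangChain-style RAG pipeline. Below is a minimal sketch of how such a file could be wired into that stack, assuming the classic langchain APIs (CTransformers, HuggingFaceEmbeddings, FAISS, RetrievalQA); the build_chain function and the config.json filename are illustrative names, not taken from the original, and the tokenizer and api blocks are left to whatever server fronts the chain.

# Hypothetical loader for the config above; names marked as assumptions
# in the lead-in are not from the original project.
import json

from langchain.llms import CTransformers
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS
from langchain.prompts import PromptTemplate
from langchain.chains import RetrievalQA


def build_chain(config_path: str = "config.json") -> RetrievalQA:
    with open(config_path) as f:
        cfg = json.load(f)

    # Quantised GGML Llama-2 model served locally via ctransformers.
    llm = CTransformers(
        model=cfg["model"]["path"],
        model_type=cfg["model"]["type"],
        config={
            "max_new_tokens": cfg["model"]["max_new_tokens"],
            "temperature": cfg["model"]["temperature"],
        },
    )

    # Embedding model used to query the persisted FAISS index.
    embeddings = HuggingFaceEmbeddings(
        model_name=cfg["vector_store"]["embeddings"]["name"],
        model_kwargs=cfg["vector_store"]["embeddings"]["kwargs"],
    )

    # Load the FAISS store from disk and expose it as a retriever.
    db = FAISS.load_local(cfg["retriever"]["path"], embeddings)
    retriever = db.as_retriever()

    # Prompt with the "context" and "question" slots from the config.
    prompt_cfg = cfg["chain"]["chain_type_kwargs"]["prompt"]
    prompt = PromptTemplate(
        template=prompt_cfg["template"],
        input_variables=prompt_cfg["input_variables"],
    )

    # "stuff" chain: retrieved chunks are concatenated into the prompt.
    return RetrievalQA.from_chain_type(
        llm=llm,
        chain_type=cfg["chain"]["chain_type"],
        retriever=retriever,
        chain_type_kwargs={"prompt": prompt},
    )

Under these assumptions, a query would be answered with something like build_chain()({"query": "..."}), while the api block (port 5000, 8 threads) describes the server process that would host this chain rather than anything consumed by LangChain itself.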