Diffstat (limited to 'R2R/r2r/examples/configs')
-rwxr-xr-x  R2R/r2r/examples/configs/local_neo4j_kg.json        69
-rwxr-xr-x  R2R/r2r/examples/configs/local_ollama.json          41
-rwxr-xr-x  R2R/r2r/examples/configs/local_ollama_rerank.json   46
-rwxr-xr-x  R2R/r2r/examples/configs/neo4j_kg.json              27
-rwxr-xr-x  R2R/r2r/examples/configs/postgres_logging.json       7
5 files changed, 190 insertions, 0 deletions
diff --git a/R2R/r2r/examples/configs/local_neo4j_kg.json b/R2R/r2r/examples/configs/local_neo4j_kg.json
new file mode 100755
index 00000000..0b4254dc
--- /dev/null
+++ b/R2R/r2r/examples/configs/local_neo4j_kg.json
@@ -0,0 +1,69 @@
+{
+ "kg": {
+ "provider": "neo4j",
+ "batch_size": 1,
+ "text_splitter": {
+ "type": "recursive_character",
+ "chunk_size": 512,
+ "chunk_overlap": 0
+ },
+ "max_entities": 10,
+ "max_relations": 20,
+ "kg_extraction_prompt": "zero_shot_ner_kg_extraction",
+ "kg_extraction_config": {
+ "model": "ollama/sciphi/triplex",
+ "temperature": 1.0,
+ "top_p": 1.0,
+ "top_k": 100,
+ "max_tokens_to_sample": 1024,
+ "stream": false,
+ "functions": null,
+ "skip_special_tokens": false,
+ "stop_token": null,
+ "num_beams": 1,
+ "do_sample": true,
+ "generate_with_chat": false,
+ "add_generation_kwargs": {},
+ "api_base": null
+ }
+ },
+ "completions": {
+ "provider": "litellm",
+ "generation_config": {
+ "model": "ollama/llama3",
+ "temperature": 0.1,
+ "top_p": 1.0,
+ "top_k": 100,
+ "max_tokens_to_sample": 1024,
+ "stream": false,
+ "functions": null,
+ "skip_special_tokens": false,
+ "stop_token": null,
+ "num_beams": 1,
+ "do_sample": true,
+ "generate_with_chat": false,
+ "add_generation_kwargs": {},
+ "api_base": null
+ }
+ },
+ "embedding": {
+ "provider": "ollama",
+ "base_model": "mxbai-embed-large",
+ "base_dimension": 1024,
+ "batch_size": 32
+ },
+ "ingestion":{
+ "excluded_parsers": [
+ "gif",
+ "jpeg",
+ "jpg",
+ "png",
+ "svg",
+ "mp3",
+ "mp4"
+ ]
+ },
+ "vector_database": {
+ "provider": "pgvector"
+ }
+}
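
The local_neo4j_kg.json config above wires the Neo4j knowledge-graph provider to an Ollama-served triplex model for extraction and a local llama3 model for completions. As a quick sanity check before pointing R2R at it, the file can be loaded with nothing but the standard library; the snippet below is a minimal sketch that assumes the repository-relative path shown in the diff and deliberately avoids R2R's own config loader.

    import json

    # Path as it appears in this diff; adjust to the local checkout.
    with open("R2R/r2r/examples/configs/local_neo4j_kg.json") as f:
        config = json.load(f)

    kg = config["kg"]
    print(kg["provider"])                           # neo4j
    print(kg["kg_extraction_config"]["model"])      # ollama/sciphi/triplex
    print(kg["max_entities"], kg["max_relations"])  # 10 20
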
diff --git a/R2R/r2r/examples/configs/local_ollama.json b/R2R/r2r/examples/configs/local_ollama.json
new file mode 100755
index 00000000..d6fd68a5
--- /dev/null
+++ b/R2R/r2r/examples/configs/local_ollama.json
@@ -0,0 +1,41 @@
+{
+ "completions": {
+ "provider": "litellm",
+ "generation_config": {
+ "model": "ollama/llama3",
+ "temperature": 0.1,
+ "top_p": 1.0,
+ "top_k": 100,
+ "max_tokens_to_sample": 1024,
+ "stream": false,
+ "functions": null,
+ "skip_special_tokens": false,
+ "stop_token": null,
+ "num_beams": 1,
+ "do_sample": true,
+ "generate_with_chat": false,
+ "add_generation_kwargs": {},
+ "api_base": null
+ }
+ },
+ "embedding": {
+ "provider": "ollama",
+ "base_model": "mxbai-embed-large",
+ "base_dimension": 1024,
+ "batch_size": 32
+ },
+ "ingestion":{
+ "excluded_parsers": [
+ "gif",
+ "jpeg",
+ "jpg",
+ "png",
+ "svg",
+ "mp3",
+ "mp4"
+ ]
+ },
+ "vector_database": {
+ "provider": "pgvector"
+ }
+}
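
local_ollama.json drops the knowledge-graph block and keeps only the litellm completion settings and the Ollama embedder. Both models it names (llama3 for completions, the 1024-dimensional mxbai-embed-large for embeddings) must already be pulled into the local Ollama instance; the sketch below, which assumes an Ollama daemon on its default port 11434, checks for them via the /api/tags listing endpoint using only the standard library.

    import json
    import urllib.request

    # /api/tags lists locally available models; missing ones can be fetched
    # with `ollama pull llama3` and `ollama pull mxbai-embed-large`.
    with urllib.request.urlopen("http://localhost:11434/api/tags") as resp:
        tags = json.load(resp)

    available = {m["name"] for m in tags.get("models", [])}
    for needed in ("llama3", "mxbai-embed-large"):
        if not any(name.startswith(needed) for name in available):
            print(f"missing model: {needed}")
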
diff --git a/R2R/r2r/examples/configs/local_ollama_rerank.json b/R2R/r2r/examples/configs/local_ollama_rerank.json
new file mode 100755
index 00000000..3a9abbe2
--- /dev/null
+++ b/R2R/r2r/examples/configs/local_ollama_rerank.json
@@ -0,0 +1,46 @@
+{
+ "completions": {
+ "provider": "litellm",
+ "generation_config": {
+ "model": "ollama/llama3",
+ "temperature": 0.1,
+ "top_p": 1.0,
+ "top_k": 100,
+ "max_tokens_to_sample": 1024,
+ "stream": false,
+ "functions": null,
+ "skip_special_tokens": false,
+ "stop_token": null,
+ "num_beams": 1,
+ "do_sample": true,
+ "generate_with_chat": false,
+ "add_generation_kwargs": {},
+ "api_base": null
+ }
+ },
+ "embedding": {
+ "provider": "sentence-transformers",
+ "base_model": "all-MiniLM-L6-v2",
+ "base_dimension": 384,
+ "rerank_model": "jinaai/jina-reranker-v1-turbo-en",
+ "rerank_dimension": 384,
+ "rerank_transformer_type": "CrossEncoder",
+ "batch_size": 32,
+ "text_splitter": {
+ "type": "recursive_character",
+ "chunk_size": 512,
+ "chunk_overlap": 20
+ }
+ },
+ "ingestion":{
+ "excluded_parsers": [
+ "gif",
+ "jpeg",
+ "jpg",
+ "png",
+ "svg",
+ "mp3",
+ "mp4"
+ ]
+ }
+}
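
local_ollama_rerank.json swaps the Ollama embedder for sentence-transformers (all-MiniLM-L6-v2, 384-dimensional) and adds a CrossEncoder reranker plus a recursive_character splitter with a small chunk overlap. The same two models can be exercised outside R2R; the sketch below illustrates the usual two-stage pattern of bi-encoder retrieval followed by cross-encoder rescoring, using the public sentence-transformers API. The trust_remote_code flag is an assumption about the Jina model, not something this config states.

    from sentence_transformers import CrossEncoder, SentenceTransformer, util

    query = "How do I configure local reranking?"
    passages = [
        "R2R supports sentence-transformers embeddings.",
        "The reranker is a cross-encoder model.",
        "Unrelated text about image parsing.",
    ]

    # First stage: bi-encoder retrieval with the 384-dim MiniLM embedder.
    embedder = SentenceTransformer("all-MiniLM-L6-v2")
    scores = util.cos_sim(embedder.encode(query), embedder.encode(passages))[0]
    candidates = [passages[int(i)] for i in scores.argsort(descending=True)]

    # Second stage: rescore (query, passage) pairs with the cross-encoder.
    reranker = CrossEncoder("jinaai/jina-reranker-v1-turbo-en", trust_remote_code=True)
    rerank_scores = reranker.predict([(query, p) for p in candidates])
    print(sorted(zip(rerank_scores, candidates), reverse=True))
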
diff --git a/R2R/r2r/examples/configs/neo4j_kg.json b/R2R/r2r/examples/configs/neo4j_kg.json
new file mode 100755
index 00000000..67fd0682
--- /dev/null
+++ b/R2R/r2r/examples/configs/neo4j_kg.json
@@ -0,0 +1,27 @@
+{
+ "kg": {
+ "provider": "neo4j",
+ "batch_size": 1,
+ "text_splitter": {
+ "type": "recursive_character",
+ "chunk_size": 1024,
+ "chunk_overlap": 0
+ },
+ "kg_extraction_config": {
+ "model": "gpt-4o",
+ "temperature": 0.1,
+ "top_p": 1.0,
+ "top_k": 100,
+ "max_tokens_to_sample": 1024,
+ "stream": false,
+ "functions": null,
+ "skip_special_tokens": false,
+ "stop_token": null,
+ "num_beams": 1,
+ "do_sample": true,
+ "generate_with_chat": false,
+ "add_generation_kwargs": {},
+ "api_base": null
+ }
+ }
+}
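
neo4j_kg.json is the hosted-model counterpart of local_neo4j_kg.json: the same Neo4j provider and splitter shape, but gpt-4o for extraction and no max_entities/max_relations caps. The Neo4j connection details are not in the file at all; they presumably come from the environment. The sketch below checks that a Neo4j instance is reachable with the official Python driver; the environment variable names are hypothetical, not taken from this diff.

    import os

    from neo4j import GraphDatabase

    # Hypothetical variable names for the connection details.
    uri = os.environ.get("NEO4J_URI", "bolt://localhost:7687")
    auth = (os.environ.get("NEO4J_USER", "neo4j"), os.environ["NEO4J_PASSWORD"])

    # verify_connectivity() raises if the server is unreachable or auth fails.
    with GraphDatabase.driver(uri, auth=auth) as driver:
        driver.verify_connectivity()
        print("Neo4j reachable at", uri)
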
diff --git a/R2R/r2r/examples/configs/postgres_logging.json b/R2R/r2r/examples/configs/postgres_logging.json
new file mode 100755
index 00000000..ec659bf4
--- /dev/null
+++ b/R2R/r2r/examples/configs/postgres_logging.json
@@ -0,0 +1,7 @@
+{
+ "logging": {
+ "provider": "postgres",
+ "log_table": "logs",
+ "log_info_table": "log_info"
+ }
+}
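
postgres_logging.json only names the provider and the two tables; the schema and connection details are left to the Postgres logging provider. A rough way to eyeball the run log afterwards is to query those tables directly; in the sketch below the connection string is an assumption, and only row counts are read so that no column names have to be guessed.

    import os

    import psycopg2

    # DSN is an assumption; point it at the database R2R logs to.
    conn = psycopg2.connect(os.environ.get("POSTGRES_DSN", "dbname=r2r user=postgres"))
    with conn, conn.cursor() as cur:
        # The two table names come straight from the config above.
        for table in ("logs", "log_info"):
            cur.execute(f"SELECT count(*) FROM {table}")
            print(table, cur.fetchone()[0])
    conn.close()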