-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathconfig.py
More file actions
61 lines (48 loc) · 1.21 KB
/
config.py
File metadata and controls
61 lines (48 loc) · 1.21 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
"""config.py — global settings for the Codeseek Engine."""
from pathlib import Path

# ---------------------------------------------------------------------------
# Model / AI backend
# ---------------------------------------------------------------------------

# Name of the default local model (for Ollama or a similar backend).
MODEL_NAME: str = "llama3.2"

# ---------------------------------------------------------------------------
# File system / indexing
# ---------------------------------------------------------------------------

# Root directory the search starts from; "." resolves to whatever the
# current working directory is when the tool is run.
DEFAULT_ROOT_DIR: Path = Path(".")

# Only files whose extension appears in this set are scanned.
ALLOWED_EXTENSIONS: set[str] = {
    ".c", ".cpp", ".go", ".h", ".java",
    ".js", ".json", ".md", ".py", ".rs",
    ".sql", ".ts", ".txt", ".yaml", ".yml",
}

# Directory names skipped entirely while walking the filesystem.
IGNORE_DIRS: set[str] = {
    ".git", ".idea", ".vscode", "__pycache__", "node_modules",
}

# Files larger than this (in bytes) are never read.  5 MB.
MAX_FILE_SIZE_BYTES: int = 5 * 1024 * 1024

# Number of consecutive lines grouped together into one chunk.
CHUNK_LINE_COUNT: int = 40

# Upper bound on top-scoring chunks retained before anything is sent
# to the LLM.
CANDIDATE_CHUNK_LIMIT: int = 20

# Location for any cached index/embeddings (reserved for later use).
DATA_DIR: Path = Path("data")

# ---------------------------------------------------------------------------
# Ollama
# ---------------------------------------------------------------------------

# Base URL of the Ollama API; override when Ollama runs on a
# non-default host/port.
OLLAMA_BASE_URL: str = "http://localhost:11434"