# pyproject.toml — packaging metadata and tool configuration for code-tokenizer
# PEP 517/518 build configuration: hatchling builds the sdist and wheel.
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
# Core distribution metadata (PEP 621).
[project]
name = "code-tokenizer"
version = "1.0.3"
description = "A professional CLI tool for counting AI model tokens in code projects"
readme = "README.md"
license = "MIT"
# Keep in sync with the Programming Language classifiers below.
requires-python = ">=3.10"
authors = [
    { name = "Code Tokenizer Contributors" },
]
maintainers = [
    { name = "Code Tokenizer Contributors" },
]
keywords = [
    "ai",
    "tokens",
    "code-analysis",
    "cli",
    "openai",
    "gpt",
    "claude",
    "llm",
]
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: Text Processing",
    "Topic :: Utilities",
]
# Runtime requirements (PEP 508 specifiers), kept sorted.
dependencies = [
    "click>=8.3.0",
    "rich>=14.2.0",
    "tiktoken>=0.12.0",
]
# Extras installable via `pip install code-tokenizer[dev]`.
# Entries are PEP 508 specifiers, kept sorted alphabetically per
# PyPA convention (was previously unsorted).
[project.optional-dependencies]
dev = [
    "black>=24.0.0",
    "flake8>=7.0.0",
    "isort>=5.13.0",
    "mypy>=1.8.0",
    "pytest>=8.0.0",
    "pytest-cov>=4.0.0",
    "safety>=3.0.0",
]
# Console entry point: installs a `code-tokenizer` command that invokes
# the `cli` callable in the code_tokenizer.main module.
[project.scripts]
code-tokenizer = "code_tokenizer.main:cli"
# Project links rendered in the PyPI sidebar.
[project.urls]
Documentation = "https://github.com/org-hex/code-tokenizer#readme"
Homepage = "https://github.com/org-hex/code-tokenizer"
Repository = "https://github.com/org-hex/code-tokenizer.git"
Issues = "https://github.com/org-hex/code-tokenizer/issues"
# Explicit upload endpoint for `uv publish`; change this to point
# releases at a mirror or test index (e.g. TestPyPI).
[tool.uv]
publish-url = "https://upload.pypi.org/legacy/"
# Black formatter settings. line-length 88 matches [tool.isort] below.
[tool.black]
line-length = 88
target-version = ['py310']
include = '\.pyi?$'
# Multi-line literal string holding a regex of paths Black skips in
# addition to its defaults. Do not edit inside the quotes casually:
# every character (including the in-string "# directories" line) is
# part of the regex value.
extend-exclude = '''
/(
# directories
\.eggs
| \.git
| \.hg
| \.mypy_cache
| \.tox
| \.venv
| build
| dist
)/
'''
# isort settings. `profile = "black"` already implies Black-compatible
# wrapping (multi_line_output = 3, line_length = 88); the explicit keys
# below pin those values even if the profile's defaults change.
[tool.isort]
profile = "black"
multi_line_output = 3
line_length = 88
known_first_party = ["code_tokenizer"]
# mypy static type checking — a strict-style configuration spelled out
# flag-by-flag rather than via `strict = true`, so individual checks
# can be relaxed independently.
[tool.mypy]
python_version = "3.10"
warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
check_untyped_defs = true
disallow_untyped_decorators = true
no_implicit_optional = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_no_return = true
warn_unreachable = true
strict_equality = true
# Pytest discovery rules and default flags.
# -ra: summarize all non-passing outcomes; -q: quiet output;
# --strict-markers / --strict-config: unknown markers or config keys
# become errors instead of silent no-ops.
[tool.pytest.ini_options]
minversion = "8.0"
addopts = "-ra -q --strict-markers --strict-config"
testpaths = ["tests"]
python_files = ["test_*.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]
# Coverage measurement scope: the whole project tree, minus the test
# suite and packaging scripts.
[tool.coverage.run]
source = ["."]
omit = [
"tests/*",
"test_*",
"setup.py",
]
# Line patterns never counted as missed coverage — the canonical
# coverage.py exclusions: explicit pragmas, reprs, debug-only
# branches, abstract methods, and `if __name__ == "__main__"` guards.
[tool.coverage.report]
exclude_lines = [
"pragma: no cover",
"def __repr__",
"if self.debug:",
"if settings.DEBUG",
"raise AssertionError",
"raise NotImplementedError",
"if 0:",
"if __name__ == .__main__.:",
"class .*\\bProtocol\\):",
"@(abc\\.)?abstractmethod",
]
# PEP 735 dependency groups (used by uv, not installable as an extra).
# Release/publishing tooling — kept separate from the `dev` extra in
# [project.optional-dependencies], which holds test/lint tools.
[dependency-groups]
dev = [
"build>=1.3.0",
"twine>=6.2.0",
]