Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 45 additions & 3 deletions apps/web/src/lib/server/datasets.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import fs from 'fs';
import path from 'path';
import { createRequire } from 'node:module';
import Database from 'better-sqlite3';
import type { DatasetSummary } from '$lib/types/types';
import { getDatasetsConfigPath, getRepoRoot } from '$lib/server/paths';
Expand All @@ -17,9 +18,21 @@ export interface DatasetConfig {

const repoRoot = getRepoRoot();
const defaultRegistryPath = getDatasetsConfigPath();
const require = createRequire(import.meta.url);

// Minimal prepared-statement surface shared by both SQLite backends
// (better-sqlite3 and the node:sqlite fallback). Rows are typed `unknown`
// because each backend returns its own row shape; callers narrow as needed.
type PreparedStatement = {
get(...params: unknown[]): unknown;
all(...params: unknown[]): unknown[];
};

// Read-only database handle abstraction so getDatasetDb can hand back either
// a better-sqlite3 Database or the node:sqlite wrapper interchangeably.
export interface ReadonlyDatasetDb {
prepare(sql: string): PreparedStatement;
close(): void;
}

const datasetDbCache = new Map<
string,
{ db: Database.Database; dbPath: string; mtimeMs: number }
{ db: ReadonlyDatasetDb; dbPath: string; mtimeMs: number }
>();
const datasetDefaultStartCache = new Map<
string,
Expand Down Expand Up @@ -197,7 +210,25 @@ export function getDatasetDbPath(datasetId: string): string {
return getDatasetConfig(datasetId).db_path;
}

export function getDatasetDb(datasetId: string): Database.Database {
/**
 * Opens a read-only SQLite database via the built-in `node:sqlite` module.
 *
 * Used as a fallback when better-sqlite3 fails to load (native-binding
 * ABI mismatch, ERR_DLOPEN_FAILED). `node:sqlite` was introduced in
 * Node.js 22.5.0, so the dynamic require is guarded and surfaced as an
 * actionable error on older runtimes instead of an opaque
 * ERR_UNKNOWN_BUILTIN_MODULE crash.
 *
 * @param dbPath - Path to the SQLite database file.
 * @returns A minimal read-only wrapper exposing `prepare` and `close`.
 * @throws Error when `node:sqlite` is unavailable (Node < 22.5.0) or the
 *         database cannot be opened.
 */
function openNodeSqliteDatabase(dbPath: string): ReadonlyDatasetDb {
	let sqliteModule: typeof import('node:sqlite');
	try {
		sqliteModule = require('node:sqlite') as typeof import('node:sqlite');
	} catch (error) {
		const code =
			error instanceof Error && 'code' in error ? (error as { code?: string }).code : undefined;
		throw new Error(
			`node:sqlite is unavailable (${code ?? 'unknown error'}); ` +
				'the better-sqlite3 fallback requires Node.js 22.5.0 or newer'
		);
	}
	const { DatabaseSync } = sqliteModule;
	type SQLInputValue = import('node:sqlite').SQLInputValue;
	const db = new DatabaseSync(dbPath, { open: true, readOnly: true });
	return {
		prepare(sql: string): PreparedStatement {
			const stmt = db.prepare(sql);
			return {
				// node:sqlite statements take SQLInputValue params; the cast keeps
				// the public PreparedStatement surface backend-agnostic.
				get: (...params: unknown[]) => stmt.get(...(params as SQLInputValue[])),
				all: (...params: unknown[]) => stmt.all(...(params as SQLInputValue[]))
			};
		},
		close() {
			db.close();
		}
	};
}
Comment on lines +213 to +229
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

🧩 Analysis chain

🌐 Web query:

What Node.js version introduced node:sqlite module?

💡 Result:

Node.js version 22.5.0 introduced the node:sqlite module.

Citations:


Add graceful error handling for node:sqlite module availability.

The node:sqlite module was introduced in Node.js 22.5.0 and is not available in earlier versions. The pipeline failure shows ERR_UNKNOWN_BUILTIN_MODULE, indicating the environment uses an older Node.js version. The current code path will fail when this module cannot be loaded, without clear error recovery.

Add explicit error handling for module import failures and document the minimum Node.js version requirement. Consider whether the environment guarantees Node.js 22.5.0+, or implement a fallback mechanism that handles missing module gracefully.

🧰 Tools
🪛 GitHub Actions: CI

[error] 214-214: Failed to derive default start date for dataset 'alpha'. better-sqlite3 failed to load; fallback to node:sqlite failed with 'ERR_UNKNOWN_BUILTIN_MODULE' / Error: No such built-in module: node:sqlite.

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@apps/web/src/lib/server/datasets.ts` around lines 213 - 229, The
openNodeSqliteDatabase function currently requires 'node:sqlite' unguarded and
will crash on pre-22.5 Node versions; wrap the require('node:sqlite') call in a
try/catch inside openNodeSqliteDatabase, detect module-load errors (e.g.
ERR_UNKNOWN_BUILTIN_MODULE) and either throw a clear, actionable Error that
states the minimum Node.js version required (22.5.0+) and how to upgrade, or
return a graceful fallback (e.g. a ReadonlyDatasetDb implementation that throws
meaningful errors on use), and update any docs/comments to record the Node.js
requirement; refer to openNodeSqliteDatabase, DatabaseSync, and
PreparedStatement when making the change.


export function getDatasetDb(datasetId: string): ReadonlyDatasetDb {
const dbPath = getDatasetDbPath(datasetId);
if (!fs.existsSync(dbPath)) {
throw new Error(`Dataset database not found for '${datasetId}' at ${dbPath}`);
Expand All @@ -217,7 +248,18 @@ export function getDatasetDb(datasetId: string): Database.Database {
}
}

const db = new Database(dbPath, { readonly: true });
let db: ReadonlyDatasetDb;
try {
db = new Database(dbPath, { readonly: true });
} catch (error) {
if (error instanceof Error && 'code' in error && error.code === 'ERR_DLOPEN_FAILED') {
console.warn(`better-sqlite3 failed to load for '${datasetId}', falling back to node:sqlite`);
db = openNodeSqliteDatabase(dbPath);
} else {
throw error;
}
}
Comment on lines +251 to +261
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

Missing error handling when node:sqlite fallback also fails.

If openNodeSqliteDatabase throws (e.g., node:sqlite unavailable), the error propagates up. While callers like getDatasetDefaultStartDate have their own catch blocks, other callers of getDatasetDb may not handle this gracefully. Consider logging a more informative error when both backends fail.

💡 Suggested improvement
 	try {
 		db = new Database(dbPath, { readonly: true });
 	} catch (error) {
 		if (error instanceof Error && 'code' in error && error.code === 'ERR_DLOPEN_FAILED') {
 			console.warn(`better-sqlite3 failed to load for '${datasetId}', falling back to node:sqlite`);
-			db = openNodeSqliteDatabase(dbPath);
+			try {
+				db = openNodeSqliteDatabase(dbPath);
+			} catch (fallbackError) {
+				console.error(`node:sqlite fallback also failed for '${datasetId}':`, fallbackError);
+				throw new Error(
+					`Failed to open database for '${datasetId}': better-sqlite3 and node:sqlite both unavailable`
+				);
+			}
 		} else {
 			throw error;
 		}
 	}
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@apps/web/src/lib/server/datasets.ts` around lines 251 - 261, The current
error handling in the getDatasetDb function catches failures from the
better-sqlite3 loading fallback to node:sqlite, but if openNodeSqliteDatabase
also throws an error, it is not caught or logged. To fix this, add a nested
try-catch around the call to openNodeSqliteDatabase to catch any errors it
throws, log a detailed error message indicating both backends failed to load the
database for the given datasetId, and then rethrow the error to ensure upstream
awareness.


datasetDbCache.set(datasetId, { db, dbPath, mtimeMs: stat.mtimeMs });
return db;
}
Expand Down
2 changes: 1 addition & 1 deletion apps/web/src/routes/api/netflow/stats/+server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ const BUCKET_SIZES: Record<string, number> = {
*/
/**
 * Builds a SQL expression mapping the unix `timestamp` column to the start
 * of its bucket, computed against local-time day boundaries and expressed
 * as a unix timestamp.
 *
 * @param groupBy - Bucket key; unknown keys fall back to the 'date' bucket.
 * @returns SQL snippet usable inside SELECT/GROUP BY clauses.
 */
function getBucketStartQuery(groupBy: string): string {
	const bucketSize = BUCKET_SIZES[groupBy] ?? BUCKET_SIZES.date;
	// Seconds elapsed since local midnight, floored to the bucket size, then
	// re-anchored at the local 'start of day' and converted back to a UTC
	// epoch so bucket edges follow local time rather than UTC.
	return `(CAST(strftime('%s', datetime(timestamp, 'unixepoch', 'localtime', 'start of day', 'utc', printf('+%d seconds', ((CAST(strftime('%s', datetime(timestamp, 'unixepoch', 'localtime')) AS integer) - CAST(strftime('%s', datetime(timestamp, 'unixepoch', 'localtime', 'start of day')) AS integer)) / ${bucketSize}) * ${bucketSize}))) AS integer))`;
}

export const GET: RequestHandler = async ({ url }) => {
Expand Down
101 changes: 85 additions & 16 deletions apps/web/tests/lib/server/datasets.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,25 +4,54 @@ import path from 'path';
import { spawnSync } from 'child_process';
import { afterEach, describe, expect, it, vi } from 'vitest';

vi.mock('better-sqlite3', () => ({
default: class MockDatabase {
constructor(private readonly dbPath: string) {}

prepare(query: string) {
return {
get: () => {
const result = spawnSync('sqlite3', [this.dbPath, query], { encoding: 'utf-8' });
if (result.status !== 0) {
throw new Error(result.stderr || 'sqlite3 query failed');
}
// Test double for node:sqlite's DatabaseSync. Instead of opening the file
// in-process, it shells out to the `sqlite3` CLI so no native bindings are
// needed. NOTE(review): this requires the sqlite3 CLI on PATH — confirm the
// CI image provides it.
class MockDatabaseSync {
constructor(private readonly dbPath: string) {}

// Mirrors the minimal PreparedStatement surface used by the production code.
prepare(query: string) {
return {
get: () => {
const result = spawnSync('sqlite3', [this.dbPath, query], { encoding: 'utf-8' });
if (result.status !== 0) {
throw new Error(result.stderr || 'sqlite3 query failed');
}

// These tests only ever SELECT MIN(timestamp); coerce the CLI's stdout
// into that single-column row shape.
const minTimestamp = Number(result.stdout.trim());
return { minTimestamp: Number.isFinite(minTimestamp) ? minTimestamp : null };
},
all: () => []
};
}

close() {}
}

const minTimestamp = Number(result.stdout.trim());
return { minTimestamp: Number.isFinite(minTimestamp) ? minTimestamp : null };
const betterSqlite3Factory = vi.fn((dbPath: string) => ({
prepare(query: string) {
return {
get: () => {
const result = spawnSync('sqlite3', [dbPath, query], { encoding: 'utf-8' });
if (result.status !== 0) {
throw new Error(result.stderr || 'sqlite3 query failed');
}
};
}

close() {}
const minTimestamp = Number(result.stdout.trim());
return { minTimestamp: Number.isFinite(minTimestamp) ? minTimestamp : null };
}
};
},
close() {}
}));

// Replace better-sqlite3's default-export constructor with the factory above
// so individual tests can override it (e.g. mockImplementationOnce throwing
// ERR_DLOPEN_FAILED to exercise the node:sqlite fallback path).
vi.mock('better-sqlite3', () => ({
default: vi.fn().mockImplementation((dbPath: string) => betterSqlite3Factory(dbPath))
}));
Comment on lines +28 to 47
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Mock structure doesn't support testing the node:sqlite fallback path.

The betterSqlite3Factory mock uses the sqlite3 CLI to execute real queries against the seeded database. However, when mockImplementationOnce throws to simulate better-sqlite3 failure, the production code falls back to node:sqlite, which:

  1. Isn't mocked in this test
  2. May not be available in the Node.js version used by CI
  3. Won't automatically use the same query execution pattern as the mock

To properly test the fallback, you need to also mock node:sqlite or use conditional test skipping based on Node.js version availability.

💡 Suggested approach: Skip test if node:sqlite unavailable
-	it('falls back to node:sqlite when better-sqlite3 fails to load', async () => {
+	it('falls back to node:sqlite when better-sqlite3 fails to load', async () => {
+		// Skip if node:sqlite is not available (requires Node 22.5+)
+		try {
+			require('node:sqlite');
+		} catch {
+			console.log('Skipping test: node:sqlite not available');
+			return;
+		}
+
 		const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'datasets-test-'));

Or use Vitest's it.skipIf:

const nodeSqliteAvailable = (() => {
	try {
		require('node:sqlite');
		return true;
	} catch {
		return false;
	}
})();

it.skipIf(!nodeSqliteAvailable)('falls back to node:sqlite when better-sqlite3 fails to load', async () => {
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@apps/web/tests/lib/server/datasets.test.ts` around lines 7 - 26, The test
currently only mocks better-sqlite3 (betterSqlite3Factory /
vi.mock('better-sqlite3')) so the fallback path to node:sqlite is untested and
can fail in CI; either mock 'node:sqlite' to provide the same minimal API your
production code expects (e.g., open/Database or whatever exported functions your
code uses) when betterSqlite3Factory is forced to throw via
mockImplementationOnce, or guard/skip the test when node:sqlite is not present
by detecting availability (try { require('node:sqlite') } catch { /* skip */ })
and using Vitest's it.skipIf to skip the fallback test—update the test that
asserts "falls back to node:sqlite when better-sqlite3 fails to load" to use one
of these two approaches and reference betterSqlite3Factory and the mocked module
names ('better-sqlite3' and 'node:sqlite') so the CI reliably exercises or skips
the fallback.


// Route createRequire-based requires to the in-process mock so the
// node:sqlite fallback can be exercised regardless of the Node version
// running the tests. Any other specifier indicates a test setup bug.
vi.mock('node:module', async () => ({
createRequire: () => (specifier: string) => {
if (specifier === 'node:sqlite') {
return { DatabaseSync: MockDatabaseSync };
}
throw new Error(`Unexpected require: ${specifier}`);
}
}));

Expand All @@ -34,6 +63,7 @@ async function loadDatasetsModule() {
describe('dataset server helpers', () => {
afterEach(() => {
vi.unstubAllEnvs();
betterSqlite3Factory.mockClear();
});

it('lists dataset summaries from registry + sqlite min timestamp', async () => {
Expand Down Expand Up @@ -115,4 +145,43 @@ describe('dataset server helpers', () => {
expect(datasets.listDatasetSources('alpha')).toEqual(['r1', 'r2']);
expect(() => datasets.getDatasetConfig('missing')).toThrow(/Unknown dataset 'missing'/);
});

// Regression test for the fallback path: force better-sqlite3's constructor
// to fail with ERR_DLOPEN_FAILED and verify the dataset's default start date
// is still derived via the (mocked) node:sqlite backend.
// NOTE(review): seeding shells out to the external `sqlite3` CLI — the test
// fails at the seed assertion if that binary is missing in CI.
it('falls back to node:sqlite when better-sqlite3 fails to load', async () => {
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'datasets-test-'));
const dbPath = path.join(tempDir, 'netflow.sqlite');
const registryPath = path.join(tempDir, 'datasets.json');

// Seed a one-row stats table; 1740823200 is presumably within 2025-03-01
// local time (matches the expected default start date) — TODO confirm the
// test runner's timezone assumption.
const seedResult = spawnSync(
'sqlite3',
[
dbPath,
'CREATE TABLE netflow_stats (timestamp INTEGER NOT NULL); INSERT INTO netflow_stats (timestamp) VALUES (1740823200);'
],
{ encoding: 'utf-8' }
);
expect(seedResult.status).toBe(0);

// Minimal single-dataset registry pointing at the temp database.
fs.writeFileSync(
registryPath,
JSON.stringify([
{
dataset_id: 'alpha',
label: 'Alpha Label',
root_path: tempDir,
db_path: dbPath
}
])
);

// Simulate a native-binding load failure for the first open only.
betterSqlite3Factory.mockImplementationOnce(() => {
const error = new Error('Module did not self-register') as Error & { code: string };
error.code = 'ERR_DLOPEN_FAILED';
throw error;
});

vi.stubEnv('DATASETS_CONFIG_PATH', registryPath);

const datasets = await loadDatasetsModule();
expect(datasets.getDatasetDefaultStartDate('alpha')).toBe('2025-03-01');
});
Comment on lines +149 to +186
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
# Check Node.js version to verify node:sqlite availability (requires Node 22.5+)
node --version

# Check if node:sqlite is available
node -e "try { require('node:sqlite'); console.log('node:sqlite available'); } catch(e) { console.log('node:sqlite NOT available:', e.message); }"

Repository: flamboh/netflow-analysis

Length of output: 279


🏁 Script executed:

# Read the test file to understand the implementation
head -160 apps/web/tests/lib/server/datasets.test.ts | tail -45

Repository: flamboh/netflow-analysis

Length of output: 1385


🏁 Script executed:

# Find and read the datasets.ts file to understand the fallback logic
fd 'datasets\.ts$' --type f | grep -v test | head -1

Repository: flamboh/netflow-analysis

Length of output: 96


🏁 Script executed:

# Read the datasets.ts file to understand the fallback logic
cat -n apps/web/src/lib/datasets.ts | head -200

Repository: flamboh/netflow-analysis

Length of output: 2404


🏁 Script executed:

# Find server-side datasets file
fd 'datasets\.ts$' --type f apps/web/src/lib/server/ 2>/dev/null

Repository: flamboh/netflow-analysis

Length of output: 103


🏁 Script executed:

# Search for getFallbackStartDate function
rg 'getFallbackStartDate' --type ts

Repository: flamboh/netflow-analysis

Length of output: 282


🏁 Script executed:

# Read the server-side datasets.ts file
cat -n apps/web/src/lib/server/datasets.ts | head -250

Repository: flamboh/netflow-analysis

Length of output: 8955


🏁 Script executed:

# Continue reading to see the complete getDatasetDb implementation
cat -n apps/web/src/lib/server/datasets.ts | sed -n '250,280p'

Repository: flamboh/netflow-analysis

Length of output: 859


🏁 Script executed:

# Check the test file setup to see how mocks are configured
head -40 apps/web/tests/lib/server/datasets.test.ts

Repository: flamboh/netflow-analysis

Length of output: 1227


🏁 Script executed:

# Check if there are any environment setup or mocking issues in the test
grep -n "node:sqlite" apps/web/tests/lib/server/datasets.test.ts

Repository: flamboh/netflow-analysis

Length of output: 152


🏁 Script executed:

# Check if node:sqlite requires special handling in the test environment
grep -n "vi.mock" apps/web/tests/lib/server/datasets.test.ts

Repository: flamboh/netflow-analysis

Length of output: 105


🏁 Script executed:

# Check if the database seeding command works (recreate the test scenario)
tempDir=$(mktemp -d)
dbPath="$tempDir/test.sqlite"
sqlite3 "$dbPath" "CREATE TABLE netflow_stats (timestamp INTEGER NOT NULL); INSERT INTO netflow_stats (timestamp) VALUES (1740823200);"
echo "Seeding status: $?"

# Test if node:sqlite can read this database
node -e "
try {
  const { DatabaseSync } = require('node:sqlite');
  const db = new DatabaseSync('$dbPath', { open: true, readOnly: true });
  const row = db.prepare('SELECT MIN(timestamp) AS minTimestamp FROM netflow_stats').get();
  console.log('Query result:', row);
  db.close();
} catch (e) {
  console.log('Error:', e.message);
}
"

# Clean up
rm -rf "$tempDir"

Repository: flamboh/netflow-analysis

Length of output: 351


🏁 Script executed:

# Check if the test has any skip conditions or checks for sqlite3
grep -B5 -A5 "spawnSync" apps/web/tests/lib/server/datasets.test.ts | head -20

Repository: flamboh/netflow-analysis

Length of output: 804


🏁 Script executed:

# Check what the actual error is when running the test
grep -n "spawnSync" apps/web/tests/lib/server/datasets.test.ts

Repository: flamboh/netflow-analysis

Length of output: 267


🏁 Script executed:

# Check the exact test assertion for seedResult
sed -n '119,160p' apps/web/tests/lib/server/datasets.test.ts | grep -n "seedResult"

Repository: flamboh/netflow-analysis

Length of output: 141


Test fails because database seeding requires the sqlite3 CLI tool, which is unavailable in the test environment.

The test attempts to seed a SQLite database using spawnSync('sqlite3', ...) at line 124, but the sqlite3 command-line tool is not available in the test environment. This causes the seeding to fail with status code 127, preventing the database from being created. The test assertion at line 138 (expect(seedResult.status).toBe(0)) should fail at this point.

To fix this, either:

  • Install the sqlite3 CLI tool in the test environment, or
  • Use an alternative seeding approach that doesn't depend on external CLI tools (e.g., directly use node:sqlite or a Node.js library to create and populate the test database)

Note: node:sqlite is available (Node.js v24.3.0) and working correctly.

🧰 Tools
🪛 GitHub Actions: CI

[error] 155-155: Test failed: expected '2026-02-22' to be '2025-03-01' (Object.is equality).

🪛 GitHub Check: Web + Python Checks

[failure] 155-155: tests/lib/server/datasets.test.ts > dataset server helpers > falls back to node:sqlite when better-sqlite3 fails to load
AssertionError: expected '2026-02-22' to be '2025-03-01' // Object.is equality

Expected: "2025-03-01"
Received: "2026-02-22"

❯ tests/lib/server/datasets.test.ts:155:56

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@apps/web/tests/lib/server/datasets.test.ts` around lines 119 - 156, The test
seeds the SQLite DB by calling spawnSync('sqlite3', ...) which fails in CI
because the sqlite3 CLI is not present; replace that external call with an
in-process DB creation using Node's builtin sqlite API (or a lightweight Node
sqlite library) to create the file at dbPath and run the SQL statements used in
the test (CREATE TABLE ...; INSERT ...), then remove the
expect(seedResult.status).toBe(0) assertion and proceed with writing
registryPath and the rest of the test; keep references to dbPath, registryPath,
loadDatasetsModule, and the betterSqlite3Factory.mockImplementationOnce stub so
the test still simulates the fallback path.

});
4 changes: 3 additions & 1 deletion apps/web/tests/routes/api-netflow-stats.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,10 @@ describe('/api/netflow/stats GET', () => {
bytesOther: 14
}
]);
const prepare = vi.fn().mockReturnValue({ all });
vi.mocked(getRequestedDataset).mockReturnValue('alpha');
vi.mocked(getDatasetDb).mockReturnValue({
prepare: vi.fn().mockReturnValue({ all })
prepare
} as never);

const response = await GET({
Expand Down Expand Up @@ -75,6 +76,7 @@ describe('/api/netflow/stats GET', () => {
]
});
expect(all).toHaveBeenCalledWith('r1', 'r2', '1', '2');
expect(prepare).toHaveBeenCalledWith(expect.stringContaining("'start of day', 'utc'"));
});

it('returns 500 when the database query fails', async () => {
Expand Down
39 changes: 0 additions & 39 deletions term-update-accomplishments-2025-12-15.md

This file was deleted.

14 changes: 0 additions & 14 deletions term-update.md

This file was deleted.

66 changes: 66 additions & 0 deletions tests/python/test_discovery.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,3 +111,69 @@ def test_scan_filesystem_skips_unparseable_and_pre_start_files(
rows = list(discovery.scan_filesystem())

assert rows == []


def test_get_stale_days_uses_local_day_boundaries() -> None:
    """Two files from the same local day — one processed, one pending —
    must collapse into a single stale (router, local-midnight) entry."""
    common, discovery = load_modules()
    db = sqlite3.connect(':memory:')
    common.init_processed_files_table(db)

    # First and last capture of 2025-03-05 local time; only the first has a
    # flow_stats_status recorded.
    rows = [
        (
            '/captures/r1/2025/03/05/nfcapd.202503050045',
            'r1',
            common.timestamp_to_unix(datetime(2025, 3, 5, 0, 45)),
            1,
        ),
        (
            '/captures/r1/2025/03/05/nfcapd.202503052355',
            'r1',
            common.timestamp_to_unix(datetime(2025, 3, 5, 23, 55)),
            None,
        ),
    ]
    db.executemany(
        'INSERT INTO processed_files (file_path, router, timestamp, flow_stats_status) VALUES (?, ?, ?, ?)',
        rows,
    )

    local_midnight = common.timestamp_to_unix(datetime(2025, 3, 5, 0, 0))
    assert discovery.get_stale_days(db, 'flow_stats') == {('r1', local_midnight)}


def test_sync_processed_files_table_updates_mirrored_path_without_duplication(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Re-discovering a known file under a new mirror root must rewrite the
    stored path in place rather than insert a duplicate row."""
    common, discovery = load_modules()
    db = sqlite3.connect(':memory:')
    common.init_processed_files_table(db)

    when = common.timestamp_to_unix(datetime(2025, 3, 2, 0, 0))
    db.execute(
        'INSERT INTO processed_files (file_path, router, timestamp, file_exists) VALUES (?, ?, ?, ?)',
        ('/old-root/r1/2025/03/02/nfcapd.202503020000', 'r1', when, 1),
    )

    # Pin discovery to a single router/window and feed it the same capture
    # file relocated under a different root.
    monkeypatch.setattr(discovery, 'AVAILABLE_ROUTERS', ['r1'])
    monkeypatch.setattr(discovery, 'DATA_START_DATE', datetime(2025, 3, 1, 0, 0))
    monkeypatch.setattr(
        discovery,
        'scan_filesystem',
        lambda discovery_window_days=0: iter(
            [('/new-root/r1/2025/03/02/nfcapd.202503020000', 'r1', datetime(2025, 3, 2, 0, 0))]
        ),
    )

    stats = discovery.sync_processed_files_table(
        db,
        include_gaps=False,
        reprocess_window_days=0,
        discovery_window_days=0,
    )

    stored = db.execute(
        'SELECT file_path, router, timestamp, file_exists FROM processed_files'
    ).fetchone()
    assert stats == {'discovered': 1, 'new_files': 0, 'gaps': 0}
    assert stored == ('/new-root/r1/2025/03/02/nfcapd.202503020000', 'r1', when, 1)
37 changes: 37 additions & 0 deletions tests/python/test_flow_db.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,3 +56,40 @@ def test_batch_insert_results_inserts_successful_rows() -> None:
).fetchone()
assert inserted == 1
assert row == ('/tmp/a', 'r1', 3, 4, 5, 0)


def test_batch_insert_results_replaces_mirrored_path_duplicate() -> None:
    """A successful result whose file now lives under a new mirror root must
    replace the stale row for the same (router, timestamp) pair."""
    _, flow_db = load_modules()
    db = sqlite3.connect(':memory:')
    flow_db.init_netflow_stats_table(db)

    # Seed one row recorded under the old mirror root.
    db.execute(
        """
        INSERT INTO netflow_stats (
            file_path, router, timestamp,
            flows, flows_tcp, flows_udp, flows_icmp, flows_other,
            packets, packets_tcp, packets_udp, packets_icmp, packets_other,
            bytes, bytes_tcp, bytes_udp, bytes_icmp, bytes_other,
            first_timestamp, last_timestamp, msec_first, msec_last, sequence_failures
        ) VALUES (?, ?, ?, ?, 0, 0, 0, 0, ?, 0, 0, 0, 0, ?, 0, 0, 0, 0, 0, 0, 0, 0, 0)
        """,
        ('/old-root/a', 'r1', 123, 3, 4, 5),
    )

    batch = [
        {
            'file_path': '/new-root/a',
            'router': 'r1',
            'timestamp': 123,
            'success': True,
            'data': {'flows': 7, 'packets': 8, 'bytes': 9},
        }
    ]
    inserted = flow_db.batch_insert_results(db, batch)

    stored = db.execute(
        'SELECT file_path, router, timestamp, flows, packets, bytes FROM netflow_stats'
    ).fetchall()
    assert inserted == 1
    assert stored == [('/new-root/a', 'r1', 123, 7, 8, 9)]
Loading
Loading