Compare commits


27 Commits
0.6.0 ... main

Author SHA1 Message Date
252776e0a7
version: 0.10.0 2025-01-31 11:53:16 +08:00
2ce726d16c
update: scheduler can now prevent selected tasks from running under high load 2025-01-25 23:25:11 +08:00
96b4cecaec
fix: incorrect logic when removing files 2025-01-21 03:40:40 +08:00
94bd14db52
update: remove maxInterval in scheduler
fix: unlinking a file without handling the case of file not exist
2025-01-21 02:12:17 +08:00
fb0c60a71e
add: blurred background in rewind page when ratio mismatch 2025-01-21 01:37:20 +08:00
3d76a5ece5
add: type for recognition 2025-01-06 23:42:11 +08:00
094f8a0925
add: scheduler 2025-01-06 23:14:42 +08:00
496c00e7e3
update: directory structure for binary files 2025-01-06 21:51:48 +08:00
95fa5b4ac7
update: README.md 2025-01-02 01:02:31 +08:00
ee5dff0d6f
update: README.md 2025-01-02 00:59:46 +08:00
6f4a1c61df
update: README 2025-01-02 00:45:53 +08:00
a0a90f2428
ref: better import for utils
fix: inappropriate ffmpeg path in `immediatelyExtractFrameFromVideo()`
2025-01-02 00:26:50 +08:00
38185cc969
fix: wrong path in builder config 2025-01-02 00:06:25 +08:00
004e1a7eef
version: 0.8.0 2025-01-01 23:37:11 +08:00
497c73725c
update: index.tsx 2025-01-01 18:47:22 +08:00
408778b8b9
feature: remove rewind window from recording 2025-01-01 16:49:32 +08:00
d4f14b97b0
improve: optimize timeline scrolling 2025-01-01 16:09:45 +08:00
f778c4f44b
feature: a usable timeline 2025-01-01 04:46:57 +08:00
a43a563609
ref: utils dir 2025-01-01 02:56:51 +08:00
97dce81297
add: decoding frame in backend server 2025-01-01 02:22:14 +08:00
71d00103dd
fix: correctly handle the situation where the screenshot file does not exist 2024-12-29 23:00:57 +08:00
d2899d4b50
ref: format 2024-12-29 22:35:25 +08:00
bc0483cdc8
improve: better logic for composing FFmpeg command
add: hardware encoding when available
2024-12-29 22:33:31 +08:00
bf7be530a1
update: use untilID as param of /timeline instead of offset 2024-12-29 21:48:56 +08:00
f53616f345
add: endpoint /timeline & /frame/:id in backend server
improve: simplify the migration code
2024-12-29 21:37:36 +08:00
fb70acab00
add: the backend server for the app
it is used for communication between Web & Electron.
2024-12-28 22:12:34 +08:00
8cf17838f4
fix: wrong field type in migrateToV3.ts
improve: use single threaded ffmpeg to reduce peak CPU utilization
update: database-changelog.md & database-structure.md
2024-12-26 18:12:20 +08:00
68 changed files with 1854 additions and 9057 deletions

View File

@ -2,37 +2,38 @@
OpenRewind is an open-source alternative to [rewind.ai](https://rewind.ai), forked from [OpenRecall](https://github.com/openrecall/openrecall).
We wanted to create an open source app that provides similar core functionality
We want to create an open source app that provides similar core functionality
to rewind.ai, and that app is **OpenRewind**.
## Alpha Release: 0.8.0
Latest update: there is an Alpha version available! We currently only support Apple Silicon Macs.
(Of course, since the app is built on Electron, support for more platforms will come in the beta/stable release.)
### ✨ Features
- GUI app. No terminal windows, no need to install any dependencies
- Take a screenshot of your screen every 2 seconds
- Encode screenshots to video at regular intervals
- A full screen "rewind" page similar to Rewind, with scrolling to view captured screenshots
- Screenshots can be taken excluding the "rewind" window
## To-dos
### Update the OCR Engine
### OCR optimized for the specific platform
OpenRecall currently uses docTR as its OCR engine, but it performs inadequately.
On my MacBook Air M2 (2022), processing a screenshot takes around 20 seconds, with CPU usage peaking at over 400%.
During this time, screenshots cannot be captured, and the engine appears to recognize only Latin characters.
We will use the OCR API provided by the OS for macOS and Windows.
To address this, we plan to replace the OCR with a more efficient alternative that supports multiple writing systems.
We are working on [RapidOCR ONNX](https://github.com/alikia2x/RapidOCR-ONNX), a fork of the project of the same name
developed by RapidAI.
RapidOCR ONNX uses [PaddleOCR](https://github.com/PaddlePaddle/PaddleOCR) as its model architecture, and
runs on the [ONNX Runtime](https://github.com/microsoft/onnxruntime/).
Reference projects:
- [ocrit](https://github.com/insidegui/ocrit/)
> We [forked](https://github.com/alikia2x/ocrit) this project to suit our needs
- [Windows.Media.Ocr.Cli](https://github.com/zh-h/Windows.Media.Ocr.Cli)
### Implement a Task Queue/Scheduler
### Big-little architecture optimizations for Apple Silicon
Currently, OpenRecall's OCR recognition and database operations are synchronous (blocking).
This increases the interval between screenshots, as described in the previous section.
We wrote a small Swift program that allows a given program to run at a selected QoS class. On Apple Silicon Macs, this lets us move some work (such as video encoding) onto the efficiency cores, reducing peak CPU usage and power consumption.
Our next goal is to introduce a task queue to handle high-load tasks (such as OCR, indexing, and archiving) asynchronously. This will ensure that time-sensitive tasks (like capturing screenshots) are prioritized.
### Improve the Frontend
The current frontend of OpenRecall is quite basic. Given my expertise in web development,
I will build a more elegant frontend from scratch.
We have now switched to Electron in order to deliver a native experience,
aiming to match the functionality of [rewind.ai](https://rewind.ai).
> See: [Prioritize Work with Quality of Service Classes](https://developer.apple.com/library/archive/documentation/Performance/Conceptual/EnergyGuide-iOS/PrioritizeWorkWithQoS.html)
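A minimal sketch of the idea from the Electron side, assuming a wrapper that behaves like macOS's `taskpolicy -b` (run a command at background QoS); the project's actual Swift helper and its flags are not shown here, so the path and options below are illustrative only:

```typescript
import { exec } from "child_process";

// Illustrative only: run a shell command at background QoS on macOS so the
// scheduler prefers the efficiency cores. OpenRewind uses its own Swift helper;
// `taskpolicy -b` is a stand-in with similar behaviour.
function runAtBackgroundQoS(command: string): Promise<string> {
    return new Promise((resolve, reject) => {
        exec(`taskpolicy -b ${command}`, (error, stdout) => {
            if (error) reject(error);
            else resolve(stdout);
        });
    });
}

// e.g. encode a chunk without saturating the performance cores:
// runAtBackgroundQoS('ffmpeg -f concat -safe 0 -i "meta.txt" -c:v h264_videotoolbox "out.mp4"');
```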
### Add More Features

BIN
bun.lockb Executable file

Binary file not shown.

bunfig.toml Normal file
View File

@ -0,0 +1,2 @@
[install.scopes]
"@jsr" = "https://npm.jsr.io"

View File

@ -7,13 +7,21 @@ export default function IconWithText() {
const { t } = useTranslation();
return (
<div className="flex">
<img src={imgUrl} className="h-20 w-20 mr-2" alt="OpenRewind icon"/>
<img src={imgUrl} className="h-20 w-20 mr-2" alt="OpenRewind icon" />
<div className="flex flex-col justify-start w-auto h-[4.2rem] overflow-hidden mt-1">
<span className="text-2xl font-semibold">OpenRewind</span>
<span className="text-sm text-gray-700 dark:text-gray-200
font-medium ml-0.5">{t("settings.version", { version: pjson.version })}</span>
<span className="text-xs text-gray-700 dark:text-gray-200
font-medium ml-0.5">{t("settings.copyright")}</span>
<span
className="text-sm text-gray-700 dark:text-gray-200
font-medium ml-0.5"
>
{t("settings.version", { version: pjson.version })}
</span>
<span
className="text-xs text-gray-700 dark:text-gray-200
font-medium ml-0.5"
>
{t("settings.copyright")}
</span>
</div>
</div>
);

View File

@ -1,10 +1,14 @@
import { MouseEventHandler } from "react";
import { Icon } from "@iconify-icon/react";
const MenuItem = ({ icon, text, onClick }: {
icon: string,
text: string,
onClick: MouseEventHandler<HTMLDivElement>
const MenuItem = ({
icon,
text,
onClick
}: {
icon: string;
text: string;
onClick: MouseEventHandler<HTMLDivElement>;
}) => {
return (
<div

View File

@ -7,10 +7,8 @@ export default function OpenSourceNote() {
OpenRewind is open source software licensed under
<a href="https://www.gnu.org/licenses/gpl-3.0.html">GPL 3.0</a>.<br />
Source code is avaliable at
<a href="https://github.com/alikia2x/openrewind">
GitHub
</a>.
<a href="https://github.com/alikia2x/openrewind">GitHub</a>.
</Trans>
</p>
)
);
}

View File

@ -1,9 +1,15 @@
import * as React from "react";
import { useRef } from "react";
const SettingsGroup = (
{ children, groupName, addGroupRef }:
{ children: React.ReactNode, groupName: string, addGroupRef: Function }) => {
const SettingsGroup = ({
children,
groupName,
addGroupRef
}: {
children: React.ReactNode;
groupName: string;
addGroupRef: Function;
}) => {
const groupRef = useRef(null);
React.useEffect(() => {

View File

@ -2,9 +2,7 @@ import { useTranslation } from "react-i18next";
const Title = ({ i18nKey }: { i18nKey: string }) => {
const { t } = useTranslation();
return (
<h1 className="text-3xl font-bold leading-[3rem]">{t(i18nKey)}</h1>
);
}
return <h1 className="text-3xl font-bold leading-[3rem]">{t(i18nKey)}</h1>;
};
export default Title;

View File

@ -1,18 +1,123 @@
# Database Schema Documentation
# Database Schema Changelog
This document outlines the changes made across different versions of
the database structure used in OpenRewind, including tables and fields.
## Version 2 Schema Changes
## Version 3 Schema Changes
Cooresponding version: Since 0.4.0
Corresponding version: Since 0.5.0
### Update `encoding_task` Table
#### Change `createAt` to `createdAt`
The column `createAt` was renamed to `createdAt` for consistency.
```sql
ALTER TABLE encoding_task RENAME COLUMN createAt TO createdAt;
```
#### Convert `createdAt` to Unix Timestamp
The `createdAt` column was updated to store Unix timestamps instead of formatted timestamps.
```typescript
const rows = db.prepare(`SELECT id, createdAt FROM encoding_task`).all() as {
[x: string]: unknown;
id: unknown;
}[];
const updateStmt = db.prepare(`UPDATE encoding_task SET createdAt_new = ? WHERE id = ?`);
rows.forEach((row) => {
const unixTimestamp = convertTimestampToUnix(row.createdAt as string);
updateStmt.run(unixTimestamp, row.id);
});
```
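The snippets in this changelog call a `convertTimestampToUnix()` helper whose implementation is not shown; a minimal sketch, assuming the old values are SQLite `CURRENT_TIMESTAMP` strings in UTC (the real helper may differ, e.g. it could use dayjs):
```typescript
// Hypothetical helper: SQLite's CURRENT_TIMESTAMP stores "YYYY-MM-DD HH:MM:SS" in UTC,
// so parse the string as UTC and return seconds since the Unix epoch.
function convertTimestampToUnix(timestamp: string): number {
    return new Date(timestamp.replace(" ", "T") + "Z").getTime() / 1000;
}
```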
### Update `frame` Table
#### Change `createAt` to `createdAt`
The column `createAt` was renamed to `createdAt` for consistency.
```sql
ALTER TABLE frame RENAME COLUMN createAt TO createdAt;
```
#### Convert `createdAt` to Unix Timestamp
The `createdAt` column was updated to store Unix timestamps instead of formatted timestamps.
```typescript
const rows = db.prepare(`SELECT id, createdAt FROM frame`).all() as {
[x: string]: unknown;
id: unknown;
}[];
const updateStmt = db.prepare(`UPDATE frame SET createdAt_new = ? WHERE id = ?`);
rows.forEach((row) => {
const unixTimestamp = convertTimestampToUnix(row.createdAt as string);
updateStmt.run(unixTimestamp, row.id);
});
```
### Update `segments` Table
#### Rename Columns for Consistency
The columns `startAt` and `endAt` were renamed to `startedAt` and `endedAt` respectively.
```sql
ALTER TABLE segments RENAME COLUMN startAt TO startedAt;
ALTER TABLE segments RENAME COLUMN endAt TO endedAt;
```
#### Convert `startedAt` and `endedAt` to Unix Timestamps
The `startedAt` and `endedAt` columns were updated to store Unix timestamps instead of formatted timestamps.
```typescript
const rows = db.prepare(`SELECT id, startedAt, endedAt FROM segments`).all() as {
[x: string]: unknown;
id: unknown;
}[];
const updateStart = db.prepare(`UPDATE segments SET startedAt_new = ? WHERE id = ?`);
const updateEnd = db.prepare(`UPDATE segments SET endedAt_new = ? WHERE id = ?`);
rows.forEach((row) => {
updateStart.run(convertTimestampToUnix(row.startedAt as string), row.id);
updateEnd.run(convertTimestampToUnix(row.endedAt as string), row.id);
});
```
### Drop Deprecated `encoded` Column
The deprecated `encoded` column was removed from the `frame` table.
```sql
ALTER TABLE frame DROP COLUMN encoded;
```
### Summary of Changes
- **Update `encoding_task` Table:**
- Renamed `createAt` to `createdAt`.
- Converted `createdAt` to store Unix timestamps.
- **Update `frame` Table:**
- Renamed `createAt` to `createdAt`.
- Converted `createdAt` to store Unix timestamps.
- Dropped the deprecated `encoded` column.
- **Update `segments` Table:**
- Renamed `startAt` and `endAt` to `startedAt` and `endedAt` respectively.
- Converted `startedAt` and `endedAt` to store Unix timestamps.
## Version 2 Schema
Corresponding version: 0.4.0
### New Table: `config`
Stores configuration data, including the database version.
| Column Name | Data Type | Constraints/Default | Description |
|-------------|-----------|---------------------|-----------------------------------------------------------------------------|
| ----------- | --------- | ------------------- | -------------------------------------- |
| `key` | TEXT | PRIMARY KEY | Unique key for configuration settings. |
| `value` | TEXT | | Value associated with the key. |
@ -27,7 +132,7 @@ INSERT INTO config (key, value) VALUES ('version', '2');
Stores encoding tasks that are queued for processing.
| Column Name | Data Type | Constraints/Default | Description |
|-------------|-----------|----------------------------|--------------------------------------|
| ----------- | --------- | -------------------------- | ------------------------------------ |
| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Unique ID for the task. |
| `createAt` | TIMESTAMP | DEFAULT CURRENT_TIMESTAMP | Timestamp when the task was created. |
| `status` | INTEGER | DEFAULT 0 | Indicates the status of the task. |
@ -62,11 +167,10 @@ END;
Stores the frames that need to be encoded for the encoding task
| Column Name | Data Type | Constraints/Default | Description |
|------------------|-----------|-------------------------------------|------------------------------------------------------|
| ---------------- | --------- | ----------------------------------- | ---------------------------------------------------- |
| `frame` | INTEGER | PRIMARY KEY, FOREIGN KEY (frame.id) | ID for the frame associated with the encoding task. |
| `encodingTaskID` | TIMESTAMP | FOREIGN KEY (encoding_task.id) | ID for the encoding task associated with this frame. |
### Update `frame` Table
#### Simplify `imgFilename`
@ -74,11 +178,11 @@ Stores the frames that need to be encoded for the encoding task
The `imgFilename` column was updated to store only the filename without the full path.
```typescript
const rows = db.prepare('SELECT id, imgFilename FROM frame').all() as OldFrame[];
rows.forEach(row => {
const rows = db.prepare("SELECT id, imgFilename FROM frame").all() as OldFrame[];
rows.forEach((row) => {
const filename = row.imgFilename.match(/[^\\/]+$/)?.[0];
if (filename) {
db.prepare('UPDATE frame SET imgFilename = ? WHERE id = ?').run(filename, row.id);
db.prepare("UPDATE frame SET imgFilename = ? WHERE id = ?").run(filename, row.id);
}
});
```
@ -102,17 +206,16 @@ UPDATE frame SET encodeStatus = CASE WHEN encoded THEN 2 ELSE 0 END;
- The `encoded` column is no longer used and is retained due to SQLite's inability to drop columns.
Creating a new table without this column and copying data to the new table could be time-consuming.
## Version 1 Schema
Cooresponding version: 0.3.x
Corresponding version: 0.3.x
### Table: `frame`
Stores information about individual frames.
| Column Name | Data Type | Constraints/Default | Description |
|-------------------|-----------|---------------------------------|-------------------------------------------------------------------------|
| ----------------- | --------- | ------------------------------- | ----------------------------------------------------------------------- |
| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Unique identifier for each frame. |
| `createAt` | TIMESTAMP | DEFAULT CURRENT_TIMESTAMP | Timestamp when the frame was created. |
| `imgFilename` | TEXT | | Filename of the image associated with the frame. |
@ -127,7 +230,7 @@ Stores information about individual frames.
Stores recognition data associated with frames.
| Column Name | Data Type | Constraints/Default | Description |
|-------------|-----------|----------------------------|-----------------------------------------------------------|
| ----------- | --------- | -------------------------- | --------------------------------------------------------- |
| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Unique identifier for each recognition data entry. |
| `frameID` | INTEGER | FOREIGN KEY (frame.id) | ID of the frame to which the recognition data belongs. |
| `data` | TEXT | | Raw recognition data. |
@ -140,7 +243,7 @@ While capturing the screen, OpenRewind retrieves the currently active window.
When it finds that the currently active window has changed to another application, a new segment will start.
| Column Name | Data Type | Constraints/Default | Description |
|---------------|-----------|----------------------------|------------------------------------------------------|
| ------------- | --------- | -------------------------- | ---------------------------------------------------- |
| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Unique identifier for each segment. |
| `startAt` | TIMESTAMP | | Timestamp when the segment starts. |
| `endAt` | TIMESTAMP | | Timestamp when the segment ends. |
@ -157,7 +260,7 @@ When it finds that the currently active window has changed to another applicatio
Used for full-text search on recognition data.
| Column Name | Data Type | Constraints/Default | Description |
|-------------|-----------|---------------------|--------------------------------------------------------|
| ----------- | --------- | ------------------- | ------------------------------------------------------ |
| `id` | INTEGER | UNINDEXED | ID of the recognition data entry. |
| `frameID` | INTEGER | UNINDEXED | ID of the frame to which the recognition data belongs. |
| `data` | TEXT | | Raw recognition data. |

View File

@ -1,38 +1,34 @@
# Database Schema Documentation (Version 2)
# Database Schema Documentation (Version 3)
This document outlines the current structure of the database schema used in the application.
It includes tables, fields, and their descriptions.
This document outlines the current structure of the database schema used in the application. It includes tables, fields, and their descriptions.
## Table: `config`
Stores configuration data.
| Column Name | Data Type | Constraints/Default | Description |
|-------------|-----------|---------------------|-----------------------------------------------------------------------------|
| ----------- | --------- | ------------------- | -------------------------------------- |
| `key` | TEXT | PRIMARY KEY | Unique key for configuration settings. |
| `value` | TEXT | | Value associated with the key. |
### Key: version
The current database schema version, represented as a integer.
Since the `config` table does not exist in V1, the version must be at least 2.
The current database schema version, represented as an integer. Since the `config` table does not exist in V1, the version must be at least 2.
## Table: `frame`
Stores information about individual frames.
| Column Name | Data Type | Constraints/Default | Description |
|-------------------|-----------|---------------------------------|-----------------------------------------------------------|
| ----------------- | --------- | ------------------------------- | --------------------------------------------------------- |
| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Unique identifier for each frame. |
| `createAt` | TIMESTAMP | DEFAULT CURRENT_TIMESTAMP | Timestamp when the frame was created. |
| `createdAt` | REAL | | Timestamp when the frame was created. |
| `imgFilename` | TEXT | | Filename of the image associated with the frame. |
| `segmentID` | INTEGER | NULL, FOREIGN KEY (segments.id) | ID of the segment to which the frame belongs. |
| `videoPath` | TEXT | NULL | Relative path to the video file if the frame was encoded. |
| `videoFrameIndex` | INTEGER | NULL | Index of the frame within the encoded video. |
| `collectionID` | INTEGER | NULL | ID of the collection to which the frame belongs. |
| `encodeStatus` | INTEGER | 0 | Indicates the encoding status of the frame. |
| `encodeStatus` | INTEGER | DEFAULT 0 | Indicates the encoding status of the frame. |
### Status Description
@ -45,7 +41,7 @@ Stores information about individual frames.
Stores recognition data associated with frames.
| Column Name | Data Type | Constraints/Default | Description |
|-------------|-----------|------------------------------|-----------------------------------------------------------------------------|
| ----------- | --------- | -------------------------- | ------------------------------------------------------ |
| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Unique identifier for each recognition data entry. |
| `frameID` | INTEGER | FOREIGN KEY (frame.id) | ID of the frame to which the recognition data belongs. |
| `data` | TEXT | | Raw recognition data. |
@ -53,15 +49,13 @@ Stores recognition data associated with frames.
## Table: `segments`
A segment is a period of time when a user uses a particular application.
While capturing the screen, OpenRewind detects the currently active window.
When it finds that the currently active window has changed to another application, a new segment will start.
A segment is a period of time when a user uses a particular application. While capturing the screen, OpenRewind detects the currently active window. When it finds that the currently active window has changed to another application, a new segment will start.
| Column Name | Data Type | Constraints/Default | Description |
|---------------|-----------|----------------------------|------------------------------------------------------|
| ------------- | --------- | -------------------------- | ---------------------------------------------------- |
| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Unique identifier for each segment. |
| `startAt` | TIMESTAMP | | Timestamp when the segment starts. |
| `endAt` | TIMESTAMP | | Timestamp when the segment ends. |
| `startedAt` | REAL | | Timestamp when the segment starts. |
| `endedAt` | REAL | | Timestamp when the segment ends. |
| `title` | TEXT | | Title of the segment. |
| `appName` | TEXT | | Name of the application associated with the segment. |
| `appPath` | TEXT | | Path to the application. |
@ -75,9 +69,9 @@ When it finds that the currently active window has changed to another applicatio
Stores encoding tasks that are queued for processing.
| Column Name | Data Type | Constraints/Default | Description |
|-------------|-----------|----------------------------|--------------------------------------|
| ----------- | --------- | -------------------------- | ------------------------------------ |
| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Unique ID for the task. |
| `createAt` | TIMESTAMP | DEFAULT CURRENT_TIMESTAMP | Timestamp when the task was created. |
| `createdAt` | REAL | | Timestamp when the task was created. |
| `status` | INTEGER | DEFAULT 0 | Indicates the status of the task. |
### Task status Description
@ -89,20 +83,19 @@ Stores encoding tasks that are queued for processing.
## Table: `encoding_task_data`
Stores the frames that need to be encoded for the encoding task
Stores the frames that need to be encoded for the encoding task.
| Column Name | Data Type | Constraints/Default | Description |
|------------------|-----------|-------------------------------------|------------------------------------------------------|
| ---------------- | --------- | ----------------------------------- | ---------------------------------------------------- |
| `encodingTaskID` | INTEGER | FOREIGN KEY (encoding_task.id) | ID for the encoding task associated with this frame. |
| `frame` | INTEGER | PRIMARY KEY, FOREIGN KEY (frame.id) | ID for the frame associated with the encoding task. |
| `encodingTaskID` | TIMESTAMP | FOREIGN KEY (encoding_task.id) | ID for the encoding task associated with this frame. |
## Virtual Table: `text_search`
Used for full-text search on recognition data.
| Column Name | Data Type | Constraints/Default | Description |
|-------------|-----------|---------------------|--------------------------------------------------------|
| ----------- | --------- | ------------------- | ------------------------------------------------------ |
| `id` | INTEGER | UNINDEXED | ID of the recognition data entry. |
| `frameID` | INTEGER | UNINDEXED | ID of the frame to which the recognition data belongs. |
| `data` | TEXT | | Raw recognition data. |
@ -112,8 +105,7 @@ Used for full-text search on recognition data.
### `recognition_data_after_insert`
Triggered after inserting a new row into `recognition_data`.
Inserts a new row into `text_search` with the same data.
Triggered after inserting a new row into `recognition_data`. Inserts a new row into `text_search` with the same data.
```sql
CREATE TRIGGER IF NOT EXISTS recognition_data_after_insert AFTER INSERT ON recognition_data
@ -125,8 +117,7 @@ END;
### `recognition_data_after_update`
Triggered after updating a row in `recognition_data`.
Updates the associated `text_search` row.
Triggered after updating a row in `recognition_data`. Updates the associated `text_search` row.
```sql
CREATE TRIGGER IF NOT EXISTS recognition_data_after_update AFTER UPDATE ON recognition_data
@ -139,8 +130,7 @@ END;
### `recognition_data_after_delete`
Triggered after deleting a row from `recognition_data`.
Deletes the associated `text_search` row.
Triggered after deleting a row from `recognition_data`. Deletes the associated `text_search` row.
```sql
CREATE TRIGGER IF NOT EXISTS recognition_data_after_delete AFTER DELETE ON recognition_data
@ -151,11 +141,10 @@ END;
### `delete_encoding_task`
Triggered after updating the `status` of an encoding task to `2` (Completed).
Deletes the associated `encoding_task_data` and `encoding_task` rows.
Triggered after updating the `status` of an encoding task to `2` (Completed). Deletes the associated `encoding_task_data` and `encoding_task` rows.
```sql
CREATE TRIGGER delete_encoding_task
CREATE TRIGGER IF NOT EXISTS delete_encoding_task
AFTER UPDATE OF status
ON encoding_task
BEGIN
@ -165,5 +154,4 @@ BEGIN
DELETE FROM encoding_task
WHERE id = OLD.id AND NEW.status = 2;
END;
```

View File

@ -2,10 +2,7 @@
"appId": "com.alikia2x.openrewind",
"mac": {
"category": "public.app-category.productivity",
"target": "dmg",
"files": [
"bin/macos"
]
"target": "dmg"
},
"productName": "OpenRewind",
"directories": {
@ -20,9 +17,7 @@
],
"win": {
"target": "nsis",
"files": [
"bin/win32"
]
"files": ["bin/win32"]
},
"linux": {
"target": "AppImage"

View File

@ -1,28 +1,37 @@
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
import tseslint from 'typescript-eslint'
import js from "@eslint/js";
import globals from "globals";
import reactHooks from "eslint-plugin-react-hooks";
import reactRefresh from "eslint-plugin-react-refresh";
import tseslint from "typescript-eslint";
export default tseslint.config(
{ ignores: ['dist'] },
{ ignores: ["dist"] },
{
extends: [js.configs.recommended, ...tseslint.configs.recommended],
files: ['**/*.{ts,tsx}'],
files: ["**/*.{ts,tsx}"],
languageOptions: {
ecmaVersion: 2020,
globals: globals.browser,
globals: globals.browser
},
plugins: {
'react-hooks': reactHooks,
'react-refresh': reactRefresh,
"react-hooks": reactHooks,
"react-refresh": reactRefresh
},
rules: {
...reactHooks.configs.recommended.rules,
'react-refresh/only-export-components': [
'warn',
{ allowConstantExport: true },
],
},
},
)
"react-refresh/only-export-components": ["warn", { allowConstantExport: true }],
"@typescript-eslint/no-unused-vars": [
"error",
{
args: "all",
argsIgnorePattern: "^_",
caughtErrors: "all",
caughtErrorsIgnorePattern: "^_",
destructuredArrayIgnorePattern: "^_",
varsIgnorePattern: "^_",
ignoreRestSiblings: true
}
]
}
}
);

View File

@ -1,45 +1,39 @@
import gulp from "gulp";
import ts from "gulp-typescript";
// @ts-ignore
import clean from "gulp-clean";
import fs from "fs";
const tsProject = ts.createProject('tsconfig.json');
const tsProject = ts.createProject("tsconfig.json");
gulp.task('clean', function () {
return gulp.src('dist/electron', {read: false, allowEmpty: true})
.pipe(clean());
gulp.task("clean", function () {
return gulp.src("dist/electron", { read: false, allowEmpty: true }).pipe(clean());
});
gulp.task('scripts', () => {
gulp.task("scripts", () => {
if (!fs.existsSync("dist/electron")) {
fs.mkdirSync("dist/electron", { recursive: true });
}
const tsResult = tsProject.src()
.pipe(tsProject());
const tsResult = tsProject.src().pipe(tsProject());
const jsFiles = gulp.src(['src/electron/**/*.js', 'src/electron/**/*.cjs']);
const jsFiles = gulp.src(["src/electron/**/*.js", "src/electron/**/*.cjs"]);
return tsResult.js
.pipe(gulp.dest('dist/electron'))
.on('end', () => {
jsFiles.pipe(gulp.dest('dist/electron'));
return tsResult.js.pipe(gulp.dest("dist/electron")).on("end", () => {
jsFiles.pipe(gulp.dest("dist/electron"));
});
});
gulp.task('assets', () => {
return gulp.src('src/electron/assets/**/*', { encoding: false })
.pipe(gulp.dest('dist/electron/assets'));
gulp.task("assets", () => {
return gulp
.src("src/electron/assets/**/*", { encoding: false })
.pipe(gulp.dest("dist/electron/assets"));
});
gulp.task('binary', () => {
return gulp.src('bin/**/*', { encoding: false })
.pipe(gulp.dest('dist/electron/bin'));
gulp.task("binary", () => {
return gulp.src(`bin/${process.platform}-${process.arch}/**/*`, { encoding: false }).pipe(gulp.dest("dist/electron/bin"));
});
gulp.task("locales", () => {
return gulp.src('i18n/**/*')
.pipe(gulp.dest('dist/electron/i18n'));
})
return gulp.src("i18n/**/*").pipe(gulp.dest("dist/electron/i18n"));
});
gulp.task('build', gulp.series('clean', 'scripts', 'assets', 'binary', 'locales'));
gulp.task("build", gulp.series("clean", "scripts", "assets", "binary", "locales"));

View File

@ -1,6 +1,6 @@
{
"name": "openrewind",
"version": "0.6.0",
"version": "0.10.0",
"type": "module",
"description": "Your second brain, superpowered.",
"main": "dist/electron/index.js",
@ -11,15 +11,21 @@
"dev:electron": "bunx gulp build && electron dist/electron/index.js",
"build:react": "vite build",
"build:app": "bunx gulp build",
"build:electron": "electron-builder"
"build:electron": "electron-builder",
"format": "bunx prettier --write ."
},
"keywords": [],
"author": "",
"license": "MIT",
"dependencies": {
"@alikia/random-key": "npm:@jsr/alikia__random-key",
"@electron/remote": "^2.1.2",
"@hono/node-server": "^1.13.7",
"@types/node-os-utils": "^1.3.4",
"@unly/universal-language-detector": "^2.0.3",
"better-sqlite3": "^11.6.0",
"dayjs": "^1.11.13",
"detect-port": "^2.1.0",
"electron-context-menu": "^4.0.4",
"electron-reloader": "^1.2.3",
"electron-screencapture": "^1.1.0",
@ -27,13 +33,17 @@
"electron-store": "^10.0.0",
"electron-window-state": "^5.0.3",
"execa": "^9.5.1",
"hono": "^4.6.15",
"i18next": "^24.0.2",
"i18next-browser-languagedetector": "^8.0.0",
"i18next-electron-fs-backend": "^3.0.2",
"i18next-fs-backend": "^2.6.0",
"i18next-icu": "^2.3.0",
"image-size": "^1.1.1",
"jotai": "^2.11.0",
"memory-cache": "^0.2.0",
"node-os-utils": "^1.3.7",
"pino": "^9.6.0",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-i18next": "^15.1.2",

View File

@ -1,12 +1,251 @@
import "./index.css";
import { useCallback, useEffect, useRef, useState } from "react";
import type { Frame } from "../../src/electron/backend/schema.d.ts";
import dayjs from "dayjs";
import relativeTime from "dayjs/plugin/relativeTime";
import localizedFormat from "dayjs/plugin/localizedFormat";
import updateLocale from "dayjs/plugin/updateLocale";
import { useAtomValue } from "jotai";
import { apiInfoAtom } from "src/renderer/state/apiInfo.ts";
export default function RewindPage() {
dayjs.extend(relativeTime);
dayjs.extend(localizedFormat);
dayjs.extend(updateLocale);
dayjs.updateLocale("en", {
relativeTime: {
future: "in %s",
past: "%s ago",
s: "%d seconds",
m: "1 minute",
mm: "%d minutes",
h: "1 hour",
hh: "%d hours",
d: "1 day",
dd: "%d days",
M: "1 month",
MM: "%d months",
y: "1 year",
yy: "%d years"
}
});
function Image({ src }: { src: string }) {
return (
<>
<div className="w-screen h-screen relative dark:text-white">
</div>
</>
<img
src={src}
alt="Current frame"
className="w-full h-full object-contain absolute inset-0"
/>
);
}
// TODO: Memory optimization
export default function RewindPage() {
const { port, apiKey } = useAtomValue(apiInfoAtom);
const [timeline, setTimeline] = useState<Frame[]>([]);
const [currentFrameId, setCurrentFrameId] = useState<number | null>(null);
const [images, setImages] = useState<Record<number, string>>({});
const [isLoadingMore, setIsLoadingMore] = useState(false);
const containerRef = useRef<HTMLDivElement>(null);
const lastAvaliableFrameId = useRef<number | null>(null);
const timeoutRef = useRef<NodeJS.Timeout | null>(null);
const updatedTimes = useRef<number>(0);
const loadingQueue = useRef<number[]>([]);
const isProcessingQueue = useRef(false);
const processQueue = useCallback(async () => {
if (!port || isProcessingQueue.current || loadingQueue.current.length === 0) return;
isProcessingQueue.current = true;
const frameId = loadingQueue.current.shift()!;
try {
const startUpdateTimes = updatedTimes.current;
const response = await fetch(`http://localhost:${port}/frame/${frameId}`, {
headers: {
"x-api-key": apiKey
}
});
const blob = await response.blob();
const url = URL.createObjectURL(blob);
setImages((prev) => {
const newImages = { ...prev, [frameId]: url };
if (updatedTimes.current <= startUpdateTimes) {
lastAvaliableFrameId.current = frameId;
updatedTimes.current++;
}
return newImages;
});
} catch (error) {
console.error(error);
} finally {
isProcessingQueue.current = false;
setTimeout(() => {
processQueue();
}, 500);
}
}, [apiKey, port]);
const loadImage = useCallback(
(frameId: number) => {
if (!port || images[frameId]) return;
// Add to queue if not already in it
if (!loadingQueue.current.includes(frameId)) {
loadingQueue.current.push(frameId);
// preserve up to 5 tasks in the queue
loadingQueue.current = loadingQueue.current.slice(-5);
}
// Start processing if not already running
if (!isProcessingQueue.current) {
processQueue();
}
},
[images, port, processQueue]
);
// Load current frame after 400ms of inactivity
useEffect(() => {
if (timeoutRef.current) {
clearTimeout(timeoutRef.current);
}
if (currentFrameId) {
timeoutRef.current = setTimeout(() => {
loadImage(currentFrameId);
}, 400);
}
return () => {
if (timeoutRef.current) {
clearTimeout(timeoutRef.current);
}
};
}, [currentFrameId, loadImage]);
// Fetch timeline data
const fetchTimeline = useCallback(
async (untilID?: number) => {
if (!port) return;
try {
const url = new URL(`http://localhost:${port}/timeline`);
if (untilID) {
url.searchParams.set("untilID", untilID.toString());
}
const response = await fetch(url.toString(), {
headers: {
"x-api-key": apiKey
}
});
const data = await response.json();
setTimeline((prev) => (untilID ? [...prev, ...data] : data));
} catch (error) {
console.error(error);
}
},
[port, apiKey]
);
useEffect(() => {
fetchTimeline();
}, [fetchTimeline]);
// Load initial images
useEffect(() => {
if (timeline.length > 0 && !currentFrameId) {
setCurrentFrameId(timeline[0].id);
loadImage(timeline[0].id);
if (timeline.length > 1) {
loadImage(timeline[1].id);
}
}
}, [timeline, currentFrameId, loadImage]);
const lastScrollTime = useRef(Date.now());
const handleScroll = (e: React.WheelEvent) => {
if (!containerRef.current || !currentFrameId) return;
// Only allow scroll changes every 30ms
const now = Date.now();
if (now - lastScrollTime.current < 30) return;
lastScrollTime.current = now;
const delta = Math.sign(e.deltaY);
const currentIndex = timeline.findIndex((frame) => frame.id === currentFrameId);
if (currentIndex === -1) return;
const newIndex = Math.min(Math.max(currentIndex - delta, 0), timeline.length - 1);
const newFrameId = timeline[newIndex].id;
if (newFrameId !== currentFrameId) {
setCurrentFrameId(newFrameId);
// Preload adjacent images
if (newIndex > 0) loadImage(timeline[newIndex - 1].id);
if (newIndex < timeline.length - 1) loadImage(timeline[newIndex + 1].id);
// Load more timeline data when we're near the end
if (newIndex > timeline.length - 10 && !isLoadingMore) {
setIsLoadingMore(true);
const lastID = timeline[timeline.length - 1].id;
fetchTimeline(lastID).finally(() => setIsLoadingMore(false));
}
}
};
function displayTime(time: number) {
// if diff < 1h, fromNow()
// else use localized format
const diff = dayjs().diff(dayjs.unix(time), "second");
if (diff < 3600) {
return dayjs.unix(time).fromNow();
} else {
return dayjs.unix(time).format("llll");
}
}
return (
<div
ref={containerRef}
className="w-screen h-screen relative dark:text-white overflow-hidden bg-black"
onWheel={handleScroll}
>
<img
src={currentFrameId
? images[currentFrameId] ||
(lastAvaliableFrameId.current ? images[lastAvaliableFrameId.current] : "")
: ""}
alt="background"
className="w-full h-full object-cover absolute inset-0 blur-lg"
/>
{/* Current image */}
<Image
src={
currentFrameId
? images[currentFrameId] ||
(lastAvaliableFrameId.current ? images[lastAvaliableFrameId.current] : "")
: ""
}
/>
{/* Time capsule */}
<div
className="absolute bottom-8 left-8 bg-zinc-800 text-white bg-opacity-80 backdrop-blur-lg
rounded-full px-4 py-2 text-xl"
>
{currentFrameId
? displayTime(
timeline.find((frame) => frame.id === currentFrameId)!.createdAt
) || "Loading..."
: "Loading..."}
</div>
</div>
);
}

View File

@ -3,7 +3,7 @@
user-select: none;
}
#settings-note a {
@apply text-blue-700 dark:text-[#66ccff]
@apply text-blue-700 dark:text-[#66ccff];
}
.text-weaken {

View File

@ -36,7 +36,7 @@ function TitleBar() {
<div
className="z-50 absolute right-2.5 top-2.5 bg-red-500 hover:bg-rose-400 h-3 w-3 rounded-full"
onClick={() => {
console.log(window.settingsWindow)
console.log(window.settingsWindow);
window.settingsWindow.close();
}}
>

File diff suppressed because it is too large.

View File

@ -0,0 +1,3 @@
export const RECORD_FRAME_RATE = 0.5;
export const ENCODING_FRAME_RATE = 30;
export const ENCODING_FRAME_INTERVAL = 1 / ENCODING_FRAME_RATE;
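// In other words: one screenshot is captured every 1 / 0.5 = 2 seconds, and each frame
// occupies 1 / 30 ≈ 0.033 s of encoded video, so playback runs at about 30 / 0.5 = 60× real time.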

View File

@ -1,58 +1,77 @@
import { Database } from "better-sqlite3";
import { exec, spawnSync } from "child_process";
import { exec } from "child_process";
import fs from "fs";
import path, { join } from "path";
import type { EncodingTask, Frame } from "./schema";
import sizeOf from "image-size";
import { getEncodingTempDir, getRecordingsDir, getScreenshotsDir } from "../utils/backend.js";
import { getEncodeCommand } from "../utils/index.js";
import { getRecordingsDir, getEncodingTempDir, getScreenshotsDir } from "../utils/index.js";
import cache from "memory-cache";
import { ENCODING_FRAME_INTERVAL, RECORD_FRAME_RATE as FRAME_RATE } from "./consts.js";
import { getDatabase } from "../utils/index.js";
const ENCODING_INTERVAL = 10000; // 10 sec
const CHECK_TASK_INTERVAL = 5000; // 5 sec
const MIN_FRAMES_TO_ENCODE = 60; // At least 10 mins (0.5fps)
const CONCURRENCY = 1; // Number of concurrent encoding tasks
const THREE_MINUTES = 180;
const MIN_FRAMES_TO_ENCODE = THREE_MINUTES * FRAME_RATE;
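// With RECORD_FRAME_RATE = 0.5 fps, this works out to 180 × 0.5 = 90 screenshots, i.e. a chunk is
// encoded once roughly three minutes of capture has accumulated (or earlier if the resolution changes).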
const CONCURRENCY = 1;
// Detect and insert encoding tasks
export function checkFramesForEncoding(db: Database) {
export function checkFramesForEncoding() {
const db = getDatabase();
const stmt = db.prepare(`
SELECT id, imgFilename, createdAt
FROM frame
WHERE encodeStatus = 0
ORDER BY createdAt ASC;
AND imgFilename IS NOT NULL
ORDER BY createdAt;
`);
const frames = stmt.all() as Frame[];
const buffer: Frame[] = [];
if (frames.length < MIN_FRAMES_TO_ENCODE) return;
for (let i = 1; i < frames.length; i++) {
const frame = frames[i];
const lastFrame = frames[i - 1];
const currentFrameSize = sizeOf(join(getScreenshotsDir(), frame.imgFilename));
const lastFrameSize = sizeOf(join(getScreenshotsDir(), lastFrame.imgFilename));
const framePath = join(getScreenshotsDir(), frame.imgFilename!);
const lastFramePath = join(getScreenshotsDir(), lastFrame.imgFilename!);
if (!fs.existsSync(framePath)) {
console.warn("File not exist:", frame.imgFilename);
deleteFrameFromDB(frame.id);
continue;
}
if (!fs.existsSync(lastFramePath)) {
console.warn("File not exist:", lastFrame.imgFilename);
deleteFrameFromDB(lastFrame.id);
continue;
}
const currentFrameSize = sizeOf(framePath);
const lastFrameSize = sizeOf(lastFramePath);
const twoFramesHaveSameSize =
currentFrameSize.width === lastFrameSize.width
&& currentFrameSize.height === lastFrameSize.height;
currentFrameSize.width === lastFrameSize.width &&
currentFrameSize.height === lastFrameSize.height;
const bufferIsBigEnough = buffer.length >= MIN_FRAMES_TO_ENCODE;
const chunkConditionSatisfied = !twoFramesHaveSameSize || bufferIsBigEnough;
buffer.push(lastFrame);
if (chunkConditionSatisfied) {
// Create new encoding task
const taskStmt = db.prepare(`
INSERT INTO encoding_task (status) VALUES (0);
INSERT INTO encoding_task (status)
VALUES (0);
`);
const taskId = taskStmt.run().lastInsertRowid;
// Insert frames into encoding_task_data
const insertStmt = db.prepare(`
INSERT INTO encoding_task_data (encodingTaskID, frame) VALUES (?, ?);
INSERT INTO encoding_task_data (encodingTaskID, frame)
VALUES (?, ?);
`);
for (const frame of buffer) {
insertStmt.run(taskId, frame.id);
db.prepare(`
UPDATE frame SET encodeStatus = 1 WHERE id = ?;
`).run(frame.id);
db.prepare(
`
UPDATE frame
SET encodeStatus = 1
WHERE id = ?;
`
).run(frame.id);
}
console.log(`Created encoding task ${taskId} with ${buffer.length} frames`);
buffer.length = 0;
@ -60,30 +79,92 @@ export function checkFramesForEncoding(db: Database) {
}
}
export async function deleteEncodedScreenshots(db: Database) {
function deleteEncodedScreenshots() {
const db = getDatabase();
// TODO: double-check that the frame was really encoded into the video
const stmt = db.prepare(`
SELECT * FROM frame WHERE encodeStatus = 2 AND imgFilename IS NOT NULL;
SELECT *
FROM frame
WHERE encodeStatus = 2
AND imgFilename IS NOT NULL;
`);
const frames = stmt.all() as Frame[];
for (const frame of frames) {
fs.unlinkSync(path.join(getScreenshotsDir(), frame.imgFilename));
const imgPath = path.join(getScreenshotsDir(), frame.imgFilename!);
if (fs.existsSync(imgPath)) {
fs.unlinkSync(imgPath);
}
const updateStmt = db.prepare(`
UPDATE frame SET imgFilename = NULL WHERE id = ?;
UPDATE frame
SET imgFilename = NULL
WHERE id = ?;
`);
updateStmt.run(frame.id);
}
}
function _deleteNonExistentScreenshots() {
const db = getDatabase();
const screenshotDir = getScreenshotsDir();
const filesInDir = new Set(fs.readdirSync(screenshotDir));
const dbStmt = db.prepare(`
SELECT imgFilename
FROM frame
WHERE imgFilename IS NOT NULL;
`);
const dbFiles = dbStmt.all() as { imgFilename: string }[];
const dbFileSet = new Set(dbFiles.map((f) => f.imgFilename));
for (const filename of filesInDir) {
if (!dbFileSet.has(filename)) {
//fs.unlinkSync(path.join(screenshotDir, filename));
console.log("[dry-run] delete:", filename);
}
}
}
export async function deleteUnnecessaryScreenshots() {
deleteEncodedScreenshots();
//deleteNonExistentScreenshots();
}
export function deleteFrameFromDB(id: number) {
const db = getDatabase();
const deleteStmt = db.prepare(`
DELETE
FROM frame
WHERE id = ?;
`);
deleteStmt.run(id);
console.log(`Deleted frame ${id} from database`);
}
function getTasksPerforming() {
return (cache.get("backend:encodingTasksPerforming") as string[]) || [];
}
function createMetaFile(frames: Frame[]) {
return frames
.map((frame) => {
if (!frame.imgFilename) return "";
const framePath = join(getScreenshotsDir(), frame.imgFilename);
const duration = ENCODING_FRAME_INTERVAL.toFixed(5);
return `file '${framePath}'\nduration ${duration}`;
})
.join("\n");
}
// Check and process encoding task
export function processEncodingTasks(db: Database) {
const tasksPerforming = cache.get("tasksPerforming") as string[] || [];
export function processEncodingTasks() {
const db = getDatabase();
let tasksPerforming = getTasksPerforming();
if (tasksPerforming.length >= CONCURRENCY) return;
const stmt = db.prepare(`
SELECT id, status
FROM encoding_task
WHERE status = 0
LIMIT ?
WHERE status = 0 LIMIT ?
`);
const tasks = stmt.all(CONCURRENCY - tasksPerforming.length) as EncodingTask[];
@ -95,7 +176,9 @@ export function processEncodingTasks(db: Database) {
// Update task status as processing (1)
const updateStmt = db.prepare(`
UPDATE encoding_task SET status = 1 WHERE id = ?
UPDATE encoding_task
SET status = 1
WHERE id = ?
`);
updateStmt.run(taskId);
@ -104,19 +187,19 @@ export function processEncodingTasks(db: Database) {
FROM encoding_task_data
JOIN frame ON encoding_task_data.frame = frame.id
WHERE encoding_task_data.encodingTaskID = ?
ORDER BY frame.createdAt ASC
ORDER BY frame.createdAt
`);
const frames = framesStmt.all(taskId) as Frame[];
const metaFilePath = path.join(getEncodingTempDir(), `${taskId}_meta.txt`);
const metaContent = frames.map(frame => `file '${path.join(getScreenshotsDir(), frame.imgFilename)}'\nduration 0.03333`).join("\n");
const metaContent = createMetaFile(frames);
fs.writeFileSync(metaFilePath, metaContent);
cache.put("tasksPerforming", [...tasksPerforming, taskId.toString()]);
cache.put("backend:encodingTasksPerforming", [...tasksPerforming, taskId.toString()]);
const videoPath = path.join(getRecordingsDir(), `${taskId}.mp4`);
const ffmpegCommand = `ffmpeg -f concat -safe 0 -i "${metaFilePath}" -c:v libx264 -r 30 "${videoPath}"`;
const ffmpegCommand = getEncodeCommand(metaFilePath, videoPath);
console.log("FFMPEG", ffmpegCommand);
exec(ffmpegCommand, (error, stdout, stderr) => {
exec(ffmpegCommand, (error, _stdout, _stderr) => {
if (error) {
console.error(`FFmpeg error: ${error.message}`);
// Roll back transaction
@ -125,20 +208,29 @@ export function processEncodingTasks(db: Database) {
console.log(`Video ${videoPath} created successfully`);
// Update task status to complete (2)
const completeStmt = db.prepare(`
UPDATE encoding_task SET status = 2 WHERE id = ?
UPDATE encoding_task
SET status = 2
WHERE id = ?
`);
completeStmt.run(taskId);
for (let frameIndex = 0; frameIndex < frames.length; frameIndex++) {
const frame = frames[frameIndex];
const updateFrameStmt = db.prepare(`
UPDATE frame SET videoPath = ?, videoFrameIndex = ?, encodeStatus = 2 WHERE id = ?
UPDATE frame
SET videoPath = ?,
videoFrameIndex = ?,
encodeStatus = 2
WHERE id = ?
`);
updateFrameStmt.run(`${taskId}.mp4`, frameIndex, frame.id);
}
db.prepare(`COMMIT;`).run();
}
cache.put("tasksPerforming", tasksPerforming.filter(id => id !== taskId.toString()));
tasksPerforming = getTasksPerforming();
cache.put(
"backend:encodingTasksPerforming",
tasksPerforming.filter((id) => id !== taskId.toString())
);
fs.unlinkSync(metaFilePath);
});
}
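The `getEncodeCommand()` helper imported from utils is not shown in this diff. A minimal sketch of what it might build, assuming VideoToolbox hardware encoding on Apple platforms with a single-threaded libx264 fallback (matching the "hardware encoding when available" and "single threaded ffmpeg" commits); the exact flags are assumptions:

```typescript
// Illustrative only: the real implementation lives in the utils module and may differ.
export function getEncodeCommand(metaFilePath: string, videoPath: string): string {
    // Prefer the hardware encoder on macOS, fall back to software x264 elsewhere.
    const encoder = process.platform === "darwin" ? "h264_videotoolbox" : "libx264";
    // Single-threaded software encoding keeps peak CPU utilization low.
    const threads = encoder === "libx264" ? "-threads 1 " : "";
    return `ffmpeg -f concat -safe 0 -i "${metaFilePath}" -c:v ${encoder} ${threads}-r 30 "${videoPath}"`;
}
```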

View File

@ -2,25 +2,27 @@ import * as path from "path";
import { Database } from "better-sqlite3";
import DB from "better-sqlite3";
import { __dirname } from "../dirname.js";
import { getDatabaseDir } from "../utils/backend.js";
import { getBinDir, getDatabaseDir } from "../utils/index.js";
import { migrate } from "./migrate/index.js";
function getLibSimpleExtensionPath() {
switch (process.platform) {
case "win32":
return path.join(__dirname, "bin", process.platform, "libsimple", "simple.dll");
return path.join(getBinDir(), "libsimple", "simple.dll");
case "darwin":
return path.join(__dirname, "bin", process.platform, "libsimple", "libsimple.dylib");
return path.join(getBinDir(), "libsimple", "libsimple.dylib");
case "linux":
return path.join(__dirname, "bin", process.platform, "libsimple", "libsimple.so");
return path.join(getBinDir(), "libsimple", "libsimple.so");
default:
throw new Error("Unsupported platform");
}
}
function databaseInitialized(db: Database) {
return db.prepare(`SELECT name FROM sqlite_master WHERE type='table' AND name='frame';`).get()
!== undefined;
return (
db.prepare(`SELECT name FROM sqlite_master WHERE type='table' AND name='frame';`).get() !==
undefined
);
}
function init(db: Database) {
@ -140,15 +142,14 @@ function init(db: Database) {
export async function initDatabase() {
const dbPath = getDatabaseDir();
const db = new DB(dbPath, { verbose: console.log });
const db = new DB(dbPath);
const libSimpleExtensionPath = getLibSimpleExtensionPath();
db.loadExtension(libSimpleExtensionPath);
if (!databaseInitialized(db)) {
init(db);
}
else {
} else {
migrate(db);
}

View File

@ -12,26 +12,23 @@ function migrateTo(version: number, db: Database) {
}
}
function getVersion(db: Database): number {
const stmt = db.prepare(`SELECT value FROM config WHERE key = 'version';`);
const data = stmt.get() as { value: string };
const version = data.value;
return parseInt(version);
}
export function migrate(db: Database) {
const configTableExists =
db.prepare(`SELECT name FROM sqlite_master WHERE type='table' AND name='config';`).get()
!== undefined;
db.prepare(`SELECT name FROM sqlite_master WHERE type='table' AND name='config';`).get() !==
undefined;
if (!configTableExists) {
migrateToV2(db);
}
let databaseVersion = parseInt(
(
db.prepare(`SELECT value FROM config WHERE key = 'version';`).get() as
{ value: any }
).value
);
let databaseVersion = getVersion(db);
while (databaseVersion < CURRENT_VERSION) {
migrateTo(databaseVersion, db);
databaseVersion = parseInt(
(
db.prepare(`SELECT value FROM config WHERE key = 'version';`).get() as
{ value: any }
).value
);
databaseVersion = getVersion(db);
}
}

View File

@ -55,9 +55,9 @@ function initSchemaInV2(db: Database) {
}
/*
* This function assumes that the database does not contain the "config" table,
* and thus needs to be migrated to Version 2.
* */
* This function assumes that the database does not contain the "config" table,
* and thus needs to be migrated to Version 2.
* */
export function migrateToV2(db: Database) {
initSchemaInV2(db);
@ -65,12 +65,11 @@ export function migrateToV2(db: Database) {
// Before: /Users/username/Library/Application Support/OpenRewind/Record Data/temp/screenshots/1733568609960.jpg
// After: 1733568609960.jpg
const rows = db.prepare("SELECT id, imgFilename FROM frame").all() as OldFrame[];
rows.forEach(row => {
rows.forEach((row) => {
const filename = row.imgFilename.match(/[^\\/]+$/)?.[0];
if (filename) {
db.prepare("UPDATE frame SET imgFilename = ? WHERE id = ?")
.run(filename, row.id);
db.prepare("UPDATE frame SET imgFilename = ? WHERE id = ?").run(filename, row.id);
}
});

View File

@ -12,7 +12,7 @@ function transformEncodingTask(db: Database) {
const createTableSql = `
CREATE TABLE IF NOT EXISTS encoding_task_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
createdAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
createdAt REAL,
status INT DEFAULT 0
);
@ -24,7 +24,10 @@ function transformEncodingTask(db: Database) {
`;
db.exec(createTableSql);
const rows = db.prepare(`SELECT id, createdAt FROM encoding_task`).all() as { [x: string]: unknown; id: unknown; }[];
const rows = db.prepare(`SELECT id, createdAt FROM encoding_task`).all() as {
[x: string]: unknown;
id: unknown;
}[];
const updateStmt = db.prepare(`UPDATE encoding_task SET createdAt_new = ? WHERE id = ?`);
rows.forEach((row) => {
const unixTimestamp = convertTimestampToUnix(row.createdAt as string);
@ -41,7 +44,7 @@ function transformFrame(db: Database) {
const createTableSql = `
CREATE TABLE frame_new(
id INTEGER PRIMARY KEY AUTOINCREMENT,
createdAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
createdAt REAL,
imgFilename TEXT,
segmentID INTEGER NULL,
videoPath TEXT NULL,
@ -55,10 +58,13 @@ function transformFrame(db: Database) {
DROP TABLE frame;
ALTER TABLE frame_new RENAME TO frame;
ALTER TABLE frame ADD COLUMN createdAt_new REAL;
`
`;
db.exec(createTableSql);
const rows = db.prepare(`SELECT id, createdAt FROM frame`).all() as { [x: string]: unknown; id: unknown; }[];
const rows = db.prepare(`SELECT id, createdAt FROM frame`).all() as {
[x: string]: unknown;
id: unknown;
}[];
const updateStmt = db.prepare(`UPDATE frame SET createdAt_new = ? WHERE id = ?`);
rows.forEach((row) => {
const unixTimestamp = convertTimestampToUnix(row.createdAt as string);
@ -92,7 +98,10 @@ function transformSegments(db: Database) {
ALTER TABLE segments ADD COLUMN startedAt_new REAL;
ALTER TABLE segments ADD COLUMN endedAt_new REAL;
`);
const rows = db.prepare(`SELECT id, startedAt, endedAt FROM segments`).all() as { [x: string]: unknown; id: unknown; }[];
const rows = db.prepare(`SELECT id, startedAt, endedAt FROM segments`).all() as {
[x: string]: unknown;
id: unknown;
}[];
const updateStart = db.prepare(`UPDATE segments SET startedAt_new = ? WHERE id = ?`);
const updateEnd = db.prepare(`UPDATE segments SET endedAt_new = ? WHERE id = ?`);
rows.forEach((row) => {
@ -108,8 +117,17 @@ function transformSegments(db: Database) {
`);
}
function renameColumn(tableName: string, oldColumnName: string, newColumnName: string, db: Database) {
if (db.prepare(`SELECT 1 FROM pragma_table_info(?) WHERE name=?`).get([tableName, oldColumnName])) {
function renameColumn(
tableName: string,
oldColumnName: string,
newColumnName: string,
db: Database
) {
if (
db
.prepare(`SELECT 1 FROM pragma_table_info(?) WHERE name=?`)
.get([tableName, oldColumnName])
) {
db.exec(`ALTER TABLE ${tableName} RENAME COLUMN ${oldColumnName} TO ${newColumnName};`);
}
}
@ -173,10 +191,10 @@ export function migrateToV3(db: Database) {
PRAGMA foreign_keys = ON;
`);
renameColumn('encoding_task', 'createAt', 'createdAt', db);
renameColumn('frame', 'createAt', 'createdAt', db);
renameColumn('segments', 'startAt', 'startedAt', db);
renameColumn('segments', 'endAt', 'endedAt', db);
renameColumn("encoding_task", "createAt", "createdAt", db);
renameColumn("frame", "createAt", "createdAt", db);
renameColumn("segments", "startAt", "startedAt", db);
renameColumn("segments", "endAt", "endedAt", db);
if (db.prepare(`SELECT 1 FROM pragma_table_info('frame') WHERE name='encoded'`).get()) {
db.prepare(`ALTER TABLE frame DROP COLUMN encoded`).run();
}

View File

@ -0,0 +1,13 @@
type RecognitionResult = RecognitionLine[];
type Pixels = number;
type OriginX = Pixels;
type OriginY = Pixels;
type Width = Pixels;
type Height = Pixels;
type Coordinates = [OriginX, OriginY, Width, Height];
interface RecognitionLine {
text: string;
confidence?: number | number[];
position?: Coordinates;
}
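An illustrative value of this type, assuming pixel-based bounding boxes and per-line (or per-character) confidences from the OCR engine:

```typescript
const example: RecognitionResult = [
    { text: "OpenRewind", confidence: 0.98, position: [120, 64, 240, 32] },
    { text: "Settings", confidence: [0.95, 0.97, 0.99], position: [40, 128, 96, 24] }
];
```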

View File

@ -0,0 +1,253 @@
import osu from "node-os-utils";
type TaskId = string;
type TaskFunction = () => void;
interface Task {
id: TaskId;
func: TaskFunction;
interval?: number;
lastRun?: number;
nextRun?: number;
isPaused: boolean;
delayUntil?: number;
requiredSystemState: SystemState;
}
export interface TaskStatus {
status: "NOT_FOUND" | "PAUSED" | "DELAYED" | "SCHEDULED" | "IDLE";
until?: string;
nextRun?: string;
}
type SystemState = "ANY" | "LOW_POWER" | "IDLE";
export class Scheduler {
private tasks: Map<TaskId, Task> = new Map();
private timer: NodeJS.Timeout | null = null;
private monitorTimer: NodeJS.Timeout | null = null;
private cpuUsage: number = 0;
constructor(private readonly minTickInterval: number = 500) {
this.start();
}
private start(): void {
this.scheduleNextTick();
this.monitorTimer = setInterval(() => this.monitor(), 1000);
}
private monitor(): void {
osu.cpu.usage().then((cpuPercentage) => {
this.cpuUsage = cpuPercentage / 100;
})
}
private scheduleNextTick(): void {
if (this.timer) {
clearTimeout(this.timer);
}
const now = Date.now();
let nextTick = now + this.minTickInterval;
for (const task of this.tasks.values()) {
const isTaskPaused = task.isPaused;
const isTaskDelayed = task.delayUntil && now < task.delayUntil;
if (isTaskPaused || isTaskDelayed) {
continue;
}
const nextTaskEarlierThanNextTick = task.nextRun && task.nextRun < nextTick;
if (nextTaskEarlierThanNextTick) {
nextTick = task.nextRun!;
}
}
const delay = Math.max(0, nextTick - now);
this.timer = setTimeout(() => this.tick(), delay);
}
private tickSingleTask(
task: Task,
getNextTick: () => number,
updateNextTick: (nextTick: number) => void
): void {
const now = Date.now();
const isTaskPaused = task.isPaused;
const isTaskDelayed = task.delayUntil && now < task.delayUntil;
if (isTaskPaused || isTaskDelayed) {
return;
}
const taskRequiredLowPower = task.requiredSystemState === "LOW_POWER";
const cpuUsage = this.cpuUsage;
const isSystemLowPower = cpuUsage < 0.75;
const isTaskReadyForLowPowerRun = taskRequiredLowPower ? isSystemLowPower : true;
const reachedTaskNextRun = task.interval && task.nextRun && now >= task.nextRun;
const isTaskReadyForIntervalRun = reachedTaskNextRun && isTaskReadyForLowPowerRun;
if (!isTaskReadyForLowPowerRun) {
this.delayTask(task.id, 1000)
}
if (isTaskReadyForIntervalRun) {
task.func();
task.lastRun = now;
task.nextRun = now + task.interval!;
}
const isTaskNextRunEarlierThanNextTick = task.nextRun && task.nextRun < getNextTick();
if (isTaskNextRunEarlierThanNextTick) {
updateNextTick(task.nextRun!);
}
}
private tick(): void {
const now = Date.now();
let nextTick = now + this.minTickInterval;
for (const task of this.tasks.values()) {
this.tickSingleTask(
task,
() => nextTick,
(v) => (nextTick = v)
);
}
this.scheduleNextTick();
}
/**
* Add a new task to the scheduler.
*
* @param id A unique string identifier for the task.
* @param func The function to be executed by the task.
* @param interval The interval (in milliseconds) between task executions.
* @param requiredSystemState The required system state for the task to run.
*/
addTask(id: TaskId, func: TaskFunction, interval?: number, requiredSystemState: SystemState = "ANY"): void {
this.tasks.set(id, {
id,
func,
interval,
isPaused: false,
lastRun: undefined,
nextRun: interval ? Date.now() + interval : undefined,
requiredSystemState: requiredSystemState,
});
this.scheduleNextTick();
}
/**
* Trigger a task to execute immediately.
*
* If the task is paused or delayed, it will not be executed.
*
* @param id The unique string identifier for the task.
*/
triggerTask(id: TaskId): void {
const task = this.tasks.get(id);
if (task && !task.isPaused && (!task.delayUntil || Date.now() >= task.delayUntil)) {
task.func();
task.lastRun = Date.now();
if (task.interval) {
task.nextRun = Date.now() + task.interval;
}
}
this.scheduleNextTick();
}
/**
* Pause a task, so that it will not be executed until it is resumed.
*
* @param id The unique string identifier for the task.
*/
pauseTask(id: TaskId): void {
const task = this.tasks.get(id);
if (task) {
task.isPaused = true;
}
this.scheduleNextTick();
}
/**
* Resume a paused task, so that it can be executed according to its interval.
*
* @param id The unique string identifier for the task.
*/
resumeTask(id: TaskId): void {
const task = this.tasks.get(id);
if (task) {
task.isPaused = false;
}
this.scheduleNextTick();
}
/**
* Delay a task from being executed for a specified amount of time.
*
* @param id The unique string identifier for the task.
* @param delayMs The amount of time in milliseconds to delay the task's execution.
*/
delayTask(id: TaskId, delayMs: number): void {
const task = this.tasks.get(id);
if (task) {
task.delayUntil = Date.now() + delayMs;
if (task.nextRun) {
task.nextRun += delayMs;
}
}
this.scheduleNextTick();
}
setTaskInterval(id: TaskId, interval: number): void {
const task = this.tasks.get(id);
if (task) {
task.interval = interval;
task.nextRun = Date.now() + interval;
}
this.scheduleNextTick();
}
getTaskStatus(id: TaskId): TaskStatus {
const task = this.tasks.get(id);
if (!task) {
return { status: "NOT_FOUND" };
}
if (task.isPaused) {
return { status: "PAUSED" };
}
if (task.delayUntil && Date.now() < task.delayUntil) {
return {
status: "DELAYED",
until: new Date(task.delayUntil).toLocaleString()
};
}
if (task.nextRun) {
return {
status: "SCHEDULED",
nextRun: new Date(task.nextRun).toLocaleString()
};
}
return { status: "IDLE" };
}
stop(): void {
if (this.timer) {
clearTimeout(this.timer);
this.timer = null;
}
if (this.monitorTimer) {
clearTimeout(this.monitorTimer);
this.monitorTimer = null;
}
}
}
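
A minimal usage sketch for the Scheduler above (the import path and task bodies are illustrative):

import { Scheduler } from "./scheduler.js";

const scheduler = new Scheduler();
// Runs every 2 seconds under any system load.
scheduler.addTask("heartbeat", () => console.log("tick"), 2000);
// Runs every 10 seconds, but only while measured CPU usage is below 75%;
// otherwise the tick loop keeps pushing it back with 1-second delays.
scheduler.addTask("encode", () => console.log("encoding..."), 10000, "LOW_POWER");
console.log(scheduler.getTaskStatus("encode")); // e.g. { status: "SCHEDULED", nextRun: "..." }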

View File

@ -1,7 +1,7 @@
export interface Frame {
id: number;
createdAt: number;
imgFilename: string;
imgFilename: string | null;
segmentID: number | null;
videoPath: string | null;
videoFrameIndex: number | null;
@ -9,7 +9,6 @@ export interface Frame {
encodeStatus: number;
}
export interface EncodingTask {
id: number;
createdAt: number;

View File

@ -1,24 +1,23 @@
import screenshot from "screenshot-desktop";
import { getScreenshotsDir } from "../utils/backend.js";
import { getDatabase, getScreenshotsDir } from "../utils/index.js";
import { join } from "path";
import { Database }from "better-sqlite3";
import SqlString from "sqlstring";
export function startScreenshotLoop(db: Database) {
return setInterval(() => {
export function takeScreenshot() {
const db = getDatabase();
const timestamp = new Date().getTime();
const screenshotDir = getScreenshotsDir();
const filename = `${timestamp}.png`;
const screenshotPath = join(screenshotDir, filename);
screenshot({filename: screenshotPath, format: "png"}).then((absolutePath) => {
screenshot({ filename: screenshotPath, format: "png" })
.then(() => {
const SQL = SqlString.format(
"INSERT INTO frame (imgFilename, createdAt) VALUES (?, ?)",
[filename, new Date().getTime() / 1000]
);
db.exec(SQL);
}).catch((err) => {
})
.catch((err) => {
console.error(err);
});
}, 2000);
}

View File

@ -2,7 +2,7 @@ import { app, BrowserWindow, screen } from "electron";
import { join } from "path";
import { __dirname } from "./dirname.js";
import windowStateManager from "electron-window-state";
import { hideDock, showDock } from "./utils/electron.js";
import { hideDock, showDock } from "./utils/index.js";
function loadURL(window: BrowserWindow, path = "", vitePort: string) {
const dev = !app.isPackaged;
@ -10,23 +10,24 @@ function loadURL(window: BrowserWindow, path = "", vitePort: string) {
window.loadURL(`http://localhost:${vitePort}/#${path}`).catch((e) => {
console.log("Error loading URL:", e);
});
}
else {
window.loadFile(join(__dirname, "../renderer/index.html"), {
hash: path,
}).catch((e) => {
} else {
window
.loadFile(join(__dirname, "../renderer/index.html"), {
hash: path
})
.catch((e) => {
console.log("Error loading URL:", e);
});
}
}
export function createSettingsWindow(vitePort: string, closeCallBack: Function) {
export function createSettingsWindow(vitePort: string, closeCallBack: () => void) {
const windowState = windowStateManager({
defaultWidth: 650,
defaultHeight: 550
});
const enableFrame = process.platform === "darwin";
let icon
let icon;
switch (process.platform) {
case "darwin":
icon = undefined;
@ -46,13 +47,13 @@ export function createSettingsWindow(vitePort: string, closeCallBack: Function)
webPreferences: {
nodeIntegration: true,
contextIsolation: true,
preload: join(__dirname, 'preload/settings.cjs')
preload: join(__dirname, "preload/settings.cjs")
},
titleBarStyle: "hiddenInset",
resizable: false,
show: false,
frame: enableFrame,
icon: icon,
icon: icon
});
windowState.manage(window);
window.on("show", () => {
@ -62,6 +63,7 @@ export function createSettingsWindow(vitePort: string, closeCallBack: Function)
window.hide();
windowState.saveState(window);
e.preventDefault();
closeCallBack();
});
window.once("close", () => {
window.hide();
@ -71,7 +73,7 @@ export function createSettingsWindow(vitePort: string, closeCallBack: Function)
return window;
}
export function createMainWindow(vitePort: string, closeCallBack: Function) {
export function createMainWindow(vitePort: string, closeCallBack: () => void) {
const display = screen.getPrimaryDisplay();
const { width, height } = display.bounds;
const windowState = windowStateManager({
@ -92,17 +94,22 @@ export function createMainWindow(vitePort: string, closeCallBack: Function) {
webPreferences: {
nodeIntegration: true,
contextIsolation: true,
preload: join(__dirname, 'preload/rewind.cjs')
preload: join(__dirname, "preload/rewind.cjs")
},
roundedCorners: false,
transparent: true,
show: false
show: false,
title: "OpenRewind Rewind Page"
});
// Exclude the window from the recording
window.setContentProtection(true);
windowState.manage(window);
window.on("close", () => {
windowState.saveState(window);
closeCallBack();
});
window.once("close", () => {
closeCallBack();

View File

@ -4,7 +4,6 @@ import fs from "fs";
import { app } from "electron";
import { __dirname } from "./dirname.js";
/**
* Selects the appropriate language based on system preferences and available languages
*
@ -21,7 +20,7 @@ export function detectLanguage(langs: string[], fallback: string): string {
const normalizedLang = systemLanguage.toLowerCase().split("-")[0];
// Find a matching language
const matchedLanguage = langs.find(lang => {
const matchedLanguage = langs.find((lang) => {
if (lang.indexOf(normalizedLang) !== -1) {
return lang;
}
@ -53,4 +52,3 @@ export default function initI18n() {
});
return i18n;
}

View File

@ -1,24 +1,43 @@
import { app, BrowserWindow, globalShortcut, ipcMain, Menu, nativeImage, screen, Tray } from "electron";
import {
app,
BrowserWindow,
globalShortcut,
ipcMain,
Menu,
nativeImage,
screen,
Tray
} from "electron";
import contextMenu from "electron-context-menu";
import { join } from "path";
import initI18n from "./i18n.js";
import { createMainWindow, createSettingsWindow } from "./createWindow.js";
import { initDatabase } from "./backend/init.js";
import { Database } from "better-sqlite3";
import { startScreenshotLoop } from "./backend/screenshot.js";
import { takeScreenshot } from "./backend/screenshot.js";
import { __dirname } from "./dirname.js";
import { hideDock } from "./utils/electron.js";
import { checkFramesForEncoding, deleteEncodedScreenshots, processEncodingTasks } from "./backend/encoding.js";
import { hideDock } from "./utils/index.js";
import {
checkFramesForEncoding,
deleteUnnecessaryScreenshots,
processEncodingTasks
} from "./backend/encoding.js";
import honoApp from "./server/index.js";
import { serve } from "@hono/node-server";
import { findAvailablePort } from "./utils/index.js";
import cache from "memory-cache";
import { generate as generateAPIKey } from "@alikia/random-key";
import { Scheduler } from "./backend/scheduler.js";
const i18n = initI18n();
const t = i18n.t.bind(i18n);
const port = process.env.PORT || "5173";
const dev = !app.isPackaged;
const scheduler = new Scheduler();
let tray: null | Tray = null;
let dbConnection: null | Database = null;
let screenshotInterval: null | NodeJS.Timeout = null;
let mainWindow: BrowserWindow | null;
let settingsWindow: BrowserWindow | null;
@ -74,12 +93,29 @@ app.on("activate", () => {});
app.on("ready", () => {
createTray();
findAvailablePort(12412).then((port) => {
generateAPIKey().then((key) => {
cache.put("server:port", port);
if (!dev) {
cache.put("server:APIKey", key);
}
serve({ fetch: honoApp.fetch, port: port });
// Send API info to renderer
settingsWindow?.webContents.send("api-info", {
port,
apiKey: key
});
console.log(`App server running on port ${port}`);
});
});
initDatabase().then((db) => {
screenshotInterval = startScreenshotLoop(db);
setInterval(checkFramesForEncoding, 5000, db);
setInterval(processEncodingTasks, 10000, db);
setInterval(deleteEncodedScreenshots, 5000, db)
scheduler.addTask("screenshot", takeScreenshot, 2000);
scheduler.addTask("check-encoding", checkFramesForEncoding, 5000);
scheduler.addTask("process-encoding", processEncodingTasks, 10000, "LOW_POWER");
scheduler.addTask("delete-screenshots", deleteUnnecessaryScreenshots, 20000);
dbConnection = db;
cache.put("server:dbConnection", dbConnection);
});
mainWindow = createMainWindow(port, () => (mainWindow = null));
settingsWindow = createSettingsWindow(port, () => (settingsWindow = null));
@ -89,14 +125,18 @@ app.on("ready", () => {
});
});
app.on("will-quit", ()=> {
app.on("will-quit", () => {
dbConnection?.close();
scheduler.stop();
});
// app.on("window-all-closed", () => {
// if (process.platform !== "darwin") app.quit();
// });
ipcMain.on('close-settings', () => {
ipcMain.on("close-settings", () => {
settingsWindow?.hide();
});
ipcMain.handle("request-api-info", () => {
return {
port: cache.get("server:port"),
apiKey: cache.get("server:APIKey")
};
});

View File

@ -1,52 +1,52 @@
const os = require('node:os');
const os = require("node:os");
const nameMap = new Map([
[24, ['Sequoia', '15']],
[23, ['Sonoma', '14']],
[22, ['Ventura', '13']],
[21, ['Monterey', '12']],
[20, ['Big Sur', '11']],
[19, ['Catalina', '10.15']],
[18, ['Mojave', '10.14']],
[17, ['High Sierra', '10.13']],
[16, ['Sierra', '10.12']],
[15, ['El Capitan', '10.11']],
[14, ['Yosemite', '10.10']],
[13, ['Mavericks', '10.9']],
[12, ['Mountain Lion', '10.8']],
[11, ['Lion', '10.7']],
[10, ['Snow Leopard', '10.6']],
[9, ['Leopard', '10.5']],
[8, ['Tiger', '10.4']],
[7, ['Panther', '10.3']],
[6, ['Jaguar', '10.2']],
[5, ['Puma', '10.1']],
[24, ["Sequoia", "15"]],
[23, ["Sonoma", "14"]],
[22, ["Ventura", "13"]],
[21, ["Monterey", "12"]],
[20, ["Big Sur", "11"]],
[19, ["Catalina", "10.15"]],
[18, ["Mojave", "10.14"]],
[17, ["High Sierra", "10.13"]],
[16, ["Sierra", "10.12"]],
[15, ["El Capitan", "10.11"]],
[14, ["Yosemite", "10.10"]],
[13, ["Mavericks", "10.9"]],
[12, ["Mountain Lion", "10.8"]],
[11, ["Lion", "10.7"]],
[10, ["Snow Leopard", "10.6"]],
[9, ["Leopard", "10.5"]],
[8, ["Tiger", "10.4"]],
[7, ["Panther", "10.3"]],
[6, ["Jaguar", "10.2"]],
[5, ["Puma", "10.1"]]
]);
const names = new Map([
['10.0.2', '11'], // It's unclear whether future Windows 11 versions will use this version scheme: https://github.com/sindresorhus/windows-release/pull/26/files#r744945281
['10.0', '10'],
['6.3', '8.1'],
['6.2', '8'],
['6.1', '7'],
['6.0', 'Vista'],
['5.2', 'Server 2003'],
['5.1', 'XP'],
['5.0', '2000'],
['4.90', 'ME'],
['4.10', '98'],
['4.03', '95'],
['4.00', '95'],
["10.0.2", "11"], // It's unclear whether future Windows 11 versions will use this version scheme: https://github.com/sindresorhus/windows-release/pull/26/files#r744945281
["10.0", "10"],
["6.3", "8.1"],
["6.2", "8"],
["6.1", "7"],
["6.0", "Vista"],
["5.2", "Server 2003"],
["5.1", "XP"],
["5.0", "2000"],
["4.90", "ME"],
["4.10", "98"],
["4.03", "95"],
["4.00", "95"]
]);
function macosRelease(release) {
release = Number((release || os.release()).split('.')[0]);
release = Number((release || os.release()).split(".")[0]);
const [name, version] = nameMap.get(release) || ['Unknown', ''];
const [name, version] = nameMap.get(release) || ["Unknown", ""];
return {
name,
version,
version
};
}
@ -54,14 +54,14 @@ function windowsRelease(release) {
const version = /(\d+\.\d+)(?:\.(\d+))?/.exec(release || os.release());
if (release && !version) {
throw new Error('`release` argument doesn\'t match `n.n`');
throw new Error("`release` argument doesn't match `n.n`");
}
let ver = version[1] || '';
const build = version[2] || '';
let ver = version[1] || "";
const build = version[2] || "";
if (ver === '10.0' && build.startsWith('2')) {
ver = '10.0.2';
if (ver === "10.0" && build.startsWith("2")) {
ver = "10.0.2";
}
return names.get(ver);
@ -69,47 +69,47 @@ function windowsRelease(release) {
function osName(platform, release) {
if (!platform && release) {
throw new Error('You can\'t specify a `release` without specifying `platform`');
throw new Error("You can't specify a `release` without specifying `platform`");
}
platform = platform ?? os.platform();
let id;
if (platform === 'darwin') {
if (!release && os.platform() === 'darwin') {
if (platform === "darwin") {
if (!release && os.platform() === "darwin") {
release = os.release();
}
const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS';
const prefix = release ? (Number(release.split(".")[0]) > 15 ? "macOS" : "OS X") : "macOS";
try {
id = release ? macosRelease(release).name : '';
id = release ? macosRelease(release).name : "";
if (id === 'Unknown') {
if (id === "Unknown") {
return prefix;
}
} catch {}
return prefix + (id ? ' ' + id : '');
return prefix + (id ? " " + id : "");
}
if (platform === 'linux') {
if (!release && os.platform() === 'linux') {
if (platform === "linux") {
if (!release && os.platform() === "linux") {
release = os.release();
}
id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : '';
return 'Linux' + (id ? ' ' + id : '');
id = release ? release.replace(/^(\d+\.\d+).*/, "$1") : "";
return "Linux" + (id ? " " + id : "");
}
if (platform === 'win32') {
if (!release && os.platform() === 'win32') {
if (platform === "win32") {
if (!release && os.platform() === "win32") {
release = os.release();
}
id = release ? windowsRelease(release) : '';
return 'Windows' + (id ? ' ' + id : '');
id = release ? windowsRelease(release) : "";
return "Windows" + (id ? " " + id : "");
}
return platform;

View File

@ -1,20 +1,5 @@
const { contextBridge, ipcRenderer } = require("electron");
// Expose protected methods that allow the renderer process to use
// the ipcRenderer without exposing the entire object
contextBridge.exposeInMainWorld("api", {
send: (channel, data) => {
// whitelist channels
let validChannels = ["toMain"];
if (validChannels.includes(channel)) {
ipcRenderer.send(channel, data);
}
},
receive: (channel, func) => {
let validChannels = ["fromMain"];
if (validChannels.includes(channel)) {
// Deliberately strip event as it includes `sender`
ipcRenderer.on(channel, (event, ...args) => func(...args));
}
}
contextBridge.exposeInMainWorld("appGlobal", {
requestApiInfo: () => ipcRenderer.invoke("request-api-info")
});

View File

@ -17,3 +17,7 @@ contextBridge.exposeInMainWorld("settingsWindow", {
ipcRenderer.send("close-settings", {});
}
});
contextBridge.exposeInMainWorld("appGlobal", {
requestApiInfo: () => ipcRenderer.invoke("request-api-info")
});

View File

@ -0,0 +1,147 @@
import { Hono } from "hono";
import { cors } from "hono/cors";
import cache from "memory-cache";
import { join } from "path";
import fs from "fs";
import { Database } from "better-sqlite3";
import type { Frame } from "../backend/schema";
import {
getDecodingTempDir,
getRecordingsDir,
getScreenshotsDir,
waitForFileExists
} from "../utils/index.js";
import { immediatelyExtractFrameFromVideo } from "../utils/index.js";
import { existsSync } from "fs";
const app = new Hono();
app.use("*", cors());
app.use(async (c, next) => {
const key = cache.get("server:APIKey");
if (key && c.req.header("x-api-key") !== key) {
c.res = undefined;
c.res = c.json({ error: "Invalid API key" }, 401);
}
await next();
});
app.get("/ping", (c) => c.text("pong"));
function getLatestFrames(db: Database, limit = 50): Frame[] {
return db
.prepare(
`
SELECT id, createdAt, imgFilename, videoPath, videoFrameIndex
FROM frame
ORDER BY createdAt DESC
LIMIT ?
`
)
.all(limit) as Frame[];
}
function getFramesUntilID(db: Database, untilID: number, limit = 50): Frame[] {
return db
.prepare(
`
SELECT id, createdAt, imgFilename, videoPath, videoFrameIndex
FROM frame
WHERE id < ?
ORDER BY createdAt DESC
LIMIT ?
`
)
.all(untilID, limit) as Frame[];
}
app.get("/timeline", async (c) => {
const query = c.req.query();
const limit = parseInt(query.limit) || undefined;
const db = cache.get("server:dbConnection");
if (query.untilID) {
return c.json(getFramesUntilID(db, parseInt(query.untilID), limit));
} else {
return c.json(getLatestFrames(db, limit));
}
});
app.get("/frame/:id", async (c) => {
const { id } = c.req.param();
const db: Database = cache.get("server:dbConnection");
const frame = db
.prepare(
`
SELECT imgFilename, videoPath, videoFrameIndex, createdAt
FROM frame
WHERE id = ?
`
)
.get(id) as Frame | undefined;
if (!frame) return c.json({ error: "Frame not found" }, 404);
const decodingTempDir = getDecodingTempDir();
const screenshotsDir = getScreenshotsDir();
const videoFilename = frame.videoPath;
const frameIndex = frame.videoFrameIndex;
const imageFilename = frame.imgFilename;
const bareVideoFilename = videoFilename?.replace(".mp4", "") || null;
const decodedImage = frameIndex
? `${bareVideoFilename}_${frameIndex.toString().padStart(4, "0")}.bmp`
: null;
let returnImagePath = "";
let needToBeDecoded = videoFilename !== null && frameIndex !== null && !frame.imgFilename;
if (decodedImage && fs.existsSync(join(getDecodingTempDir(), decodedImage))) {
needToBeDecoded = false;
returnImagePath = join(decodingTempDir, decodedImage);
} else if (imageFilename && fs.existsSync(join(screenshotsDir, imageFilename))) {
returnImagePath = join(screenshotsDir, imageFilename);
}
if (needToBeDecoded) {
const videoExists = fs.existsSync(join(getRecordingsDir(), videoFilename!));
if (!videoExists) {
return c.json({ error: "Video not found" }, { status: 404 });
}
const decodedFilename = immediatelyExtractFrameFromVideo(
videoFilename!,
frameIndex!,
decodingTempDir
);
const decodedPath = join(decodingTempDir, decodedFilename);
await waitForFileExists(decodedPath);
if (existsSync(decodedPath)) {
const imageBuffer = fs.readFileSync(decodedPath);
setTimeout(() => {
fs.unlinkSync(decodedPath);
}, 1000);
return new Response(imageBuffer, {
status: 200,
headers: {
"Content-Type": "image/bmp"
}
});
} else {
return c.json({ error: "Frame cannot be decoded" }, { status: 500 });
}
} else {
const imageBuffer = fs.readFileSync(returnImagePath);
const imageMimeType = imageFilename?.endsWith(".png") ? "image/png" : "image/jpeg";
return new Response(imageBuffer, {
status: 200,
headers: {
"Content-Type": imageMimeType
}
});
}
});
export default app;
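
A sketch of how a renderer or external client might call these endpoints, assuming the port and API key obtained through the request-api-info IPC handler added earlier in this diff:

async function fetchTimeline(port: number, apiKey: string, untilID?: number) {
	const query = untilID ? `?untilID=${untilID}&limit=50` : "?limit=50";
	const res = await fetch(`http://localhost:${port}/timeline${query}`, {
		headers: { "x-api-key": apiKey }
	});
	if (!res.ok) throw new Error(`timeline request failed: ${res.status}`);
	// Each entry carries id, createdAt, imgFilename, videoPath and videoFrameIndex.
	return res.json();
}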

View File

@ -1,78 +0,0 @@
import path from "path";
import os from "os";
import fs from "fs";
import { __dirname } from "../dirname.js";
export function getUserDataDir() {
switch (process.platform) {
case "win32":
return path.join(process.env.APPDATA!, "OpenRewind", "Record Data");
case "darwin":
return path.join(os.homedir(), "Library", "Application Support", "OpenRewind", "Record Data");
case "linux":
return path.join(os.homedir(), ".config", "OpenRewind", "Record Data");
default:
throw new Error("Unsupported platform");
}
}
export function createDataDir() {
const dataDir = getUserDataDir();
if (!fs.existsSync(dataDir)) {
fs.mkdirSync(dataDir, { recursive: true });
}
return dataDir;
}
export function createTempDir() {
const tempDir = path.join(getUserDataDir(), "temp");
if (!fs.existsSync(tempDir)) {
fs.mkdirSync(tempDir, { recursive: true });
}
return tempDir;
}
export function getDatabaseDir() {
const dataDir = createDataDir();
return path.join(dataDir, "main.db");
}
export function getScreenshotsDir() {
const tempDir = createTempDir();
const screenshotsDir = path.join(tempDir, "screenshots");
if (!fs.existsSync(screenshotsDir)) {
fs.mkdirSync(screenshotsDir, { recursive: true });
}
return screenshotsDir;
}
export function getRecordingsDir() {
const dataDir = createDataDir();
const recordingsDir = path.join(dataDir, "recordings");
if (!fs.existsSync(recordingsDir)) {
fs.mkdirSync(recordingsDir, { recursive: true });
}
return path.join(dataDir, "recordings");
}
export function getEncodingTempDir() {
const tempDir = createTempDir();
const encodingTempDir = path.join(tempDir, "encoding");
if (!fs.existsSync(encodingTempDir)) {
fs.mkdirSync(encodingTempDir, { recursive: true });
}
return encodingTempDir;
}
export function getFFmpegPath() {
switch (process.platform) {
case "win32":
return path.join(__dirname, "bin", process.platform, "ffmpeg.exe");
case "darwin":
return path.join(__dirname, "bin", process.platform, "ffmpeg");
case "linux":
return path.join(__dirname, "bin", process.platform, "ffmpeg");
default:
throw new Error("Unsupported platform");
}
}

View File

@ -0,0 +1,6 @@
import { Database } from "better-sqlite3";
import cache from "memory-cache";
export function getDatabase(): Database {
return cache.get("server:dbConnection");
}

View File

@ -1,15 +0,0 @@
import { app } from "electron";
export function hideDock(){
if (process.platform === 'darwin') {
// Hide the dock icon on macOS
app.dock.hide();
}
}
export function showDock(){
if (process.platform === 'darwin') {
// Show the dock icon on macOS
app.dock.show();
}
}

View File

@ -0,0 +1,111 @@
import path from "path";
import fs from "fs";
import { getUserDataDir } from "../platform/index.js";
import { __dirname } from "../../dirname.js";
export function createDataDir() {
const dataDir = getUserDataDir();
if (!fs.existsSync(dataDir)) {
fs.mkdirSync(dataDir, { recursive: true });
}
return dataDir;
}
export function createTempDir() {
const tempDir = path.join(getUserDataDir(), "temp");
if (!fs.existsSync(tempDir)) {
fs.mkdirSync(tempDir, { recursive: true });
}
return tempDir;
}
export function getDatabaseDir() {
const dataDir = createDataDir();
return path.join(dataDir, "main.db");
}
export function getScreenshotsDir() {
const tempDir = createTempDir();
const screenshotsDir = path.join(tempDir, "screenshots");
if (!fs.existsSync(screenshotsDir)) {
fs.mkdirSync(screenshotsDir, { recursive: true });
}
return screenshotsDir;
}
export function getRecordingsDir() {
const dataDir = createDataDir();
const recordingsDir = path.join(dataDir, "recordings");
if (!fs.existsSync(recordingsDir)) {
fs.mkdirSync(recordingsDir, { recursive: true });
}
return path.join(dataDir, "recordings");
}
export function getEncodingTempDir() {
const tempDir = createTempDir();
const encodingTempDir = path.join(tempDir, "encoding");
if (!fs.existsSync(encodingTempDir)) {
fs.mkdirSync(encodingTempDir, { recursive: true });
}
return encodingTempDir;
}
export function getDecodingTempDir() {
const tempDir = createTempDir();
const decodingTempDir = path.join(tempDir, "decoding");
if (!fs.existsSync(decodingTempDir)) {
fs.mkdirSync(decodingTempDir, { recursive: true });
}
return decodingTempDir;
}
export function getLogDir() {
const dataDir = createDataDir();
const logDir = path.join(dataDir, "logs");
if (!fs.existsSync(logDir)) {
fs.mkdirSync(logDir, { recursive: true });
}
return logDir;
}
export function getBinDir() {
return path.join(__dirname, "bin");
}
export async function waitForFileExists(filePath: string, timeout: number = 10000): Promise<void> {
return new Promise((resolve, reject) => {
fs.access(filePath, fs.constants.F_OK, (err) => {
if (!err) {
resolve();
return;
}
const dir = path.dirname(filePath);
const filename = path.basename(filePath);
const watcher = fs.watch(dir, (eventType, watchedFilename) => {
if (eventType === "rename" && watchedFilename === filename) {
fs.access(filePath, fs.constants.F_OK, (err) => {
if (!err) {
clearTimeout(timeoutId);
watcher.close();
resolve();
}
});
}
});
watcher.on("error", (err) => {
clearTimeout(timeoutId);
watcher.close();
reject(err);
});
const timeoutId = setTimeout(() => {
watcher.close();
reject(new Error(`Timeout: File ${filePath} did not exist within ${timeout}ms`));
}, timeout);
});
});
}
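
The /frame/:id endpoint earlier in this diff awaits this helper so it only reads the decoded image after FFmpeg has written it; a standalone call looks like this (the filename is illustrative):

const decodedPath = path.join(getDecodingTempDir(), "1735689600000_0001.bmp"); // illustrative filename
await waitForFileExists(decodedPath, 5000); // resolves once the file exists, rejects after 5 seconds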

View File

@ -0,0 +1,6 @@
export * from "./fs/index.js";
export * from "./platform/index.js";
export * from "./video/index.js";
export * from "./network/index.js";
export * from "./logging/index.js";
export * from "./backend/index.js";

View File

@ -0,0 +1,10 @@
import pino from "pino";
import { join } from "path";
import { getLogDir } from "../fs/index.js";
const logPath = join(getLogDir(), "log.json");
const dest = pino.destination(logPath);
const logger = pino(dest);
export { logger };

View File

@ -0,0 +1,17 @@
import { detect } from "detect-port";
/**
* Finds an available port starting from a given port.
* @param startingFrom - The port number to start searching from.
* @returns A Promise that resolves to the first available port number.
*/
export async function findAvailablePort(startingFrom: number): Promise<number> {
return detect(startingFrom)
.then((realPort) => {
return realPort; // Return the available port
})
.catch((err) => {
console.error(`Error detecting port: ${err.message}`);
throw err; // Rethrow the error for further handling if needed
});
}

View File

@ -0,0 +1,60 @@
import { join } from "path";
import os from "os";
import { app } from "electron";
import { getBinDir, logger } from "../index.js";
export function getUserDataDir() {
switch (process.platform) {
case "win32":
return join(process.env.APPDATA!, "OpenRewind", "Record Data");
case "darwin":
return join(
os.homedir(),
"Library",
"Application Support",
"OpenRewind",
"Record Data"
);
case "linux":
return join(os.homedir(), ".config", "OpenRewind", "Record Data");
default:
throw new Error("Unsupported platform");
}
}
export function hideDock() {
if (process.platform === "darwin") {
// Hide the dock icon on macOS
app.dock.hide();
}
}
export function showDock() {
if (process.platform === "darwin") {
// Show the dock icon on macOS
app.dock.show();
}
}
export function getFFmpegPath() {
let path = "";
switch (process.platform) {
case "win32":
path = join(getBinDir(), "ffmpeg.exe");
break;
case "darwin":
path = join(getBinDir(), "ffmpeg");
break;
case "linux":
path = join(getBinDir(), "ffmpeg");
break;
default:
throw new Error("Unsupported platform");
}
logger.info("FFmpeg path: %s", path);
return path;
}
export function getOCRitPath() {
return join(getBinDir(), "ocrit");
}

View File

@ -0,0 +1,81 @@
import { join } from "path";
import { spawn, execSync } from "child_process";
import { getRecordingsDir } from "../fs/index.js";
import { getFFmpegPath } from "../platform/index.js";
import { ENCODING_FRAME_RATE } from "../../backend/consts.js";
import cache from "memory-cache";
function getBestCodec() {
const cachedCodec = cache.get("backend:bestCodec");
if (cachedCodec) {
return cachedCodec;
}
const codecs = execSync(`${getFFmpegPath()} -codecs`).toString("utf-8");
let codec = "";
if (codecs.includes("h264_videotoolbox")) {
codec = "h264_videotoolbox";
} else {
codec = "libx264";
}
cache.put("backend:bestCodec", codec);
return codec;
}
export function getEncodeCommand(metaFilePath: string, videoPath: string) {
const codec = getBestCodec();
return `${getFFmpegPath()} -f concat -safe 0 -i "${metaFilePath}" -c:v ${codec} -r ${ENCODING_FRAME_RATE} -y -threads 1 "${videoPath}"`;
}
export function immediatelyExtractFrameFromVideo(
videoFilename: string,
frameIndex: number,
outputPath = "."
) {
const bareVideoFilename = videoFilename.split(".").slice(0, -1).join(".");
const fullVideoPath = join(getRecordingsDir(), videoFilename);
const outputFilename = `${bareVideoFilename}_${frameIndex.toString().padStart(4, "0")}.bmp`;
const outputPathArg = join(outputPath, outputFilename);
const args = [
"-ss",
`${formatTime(frameIndex / ENCODING_FRAME_RATE)}`,
"-i",
`${fullVideoPath}`,
"-vframes",
"1",
`${outputPathArg}`
];
const ffmpeg = spawn(getFFmpegPath(), args);
ffmpeg.stdout.on("data", (data) => {
console.log(data.toString());
});
ffmpeg.stderr.on("data", (data) => {
console.log(data.toString());
});
ffmpeg.on("exit", (code) => {
if (code !== 0) {
console.error("Error extracting frame:", code);
}
});
return outputFilename;
}
function formatTime(seconds: number): string {
// Calculate hours, minutes, seconds, and milliseconds
const hours = Math.floor(seconds / 3600);
const minutes = Math.floor((seconds % 3600) / 60);
const secs = Math.floor(seconds % 60);
const milliseconds = Math.round((seconds % 1) * 1000);
// Format the output with leading zeros
const formattedTime =
[
String(hours).padStart(2, "0"),
String(minutes).padStart(2, "0"),
String(secs).padStart(2, "0")
].join(":") +
"." +
String(milliseconds).padStart(3, "0");
return formattedTime;
}
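
To make the -ss offset above concrete: assuming ENCODING_FRAME_RATE were 2 fps (the real value lives in consts.js and is not shown in this diff), frame 123 would map to 61.5 seconds:

const assumedFrameRate = 2; // assumption for illustration only
console.log(formatTime(123 / assumedFrameRate)); // "00:01:01.500"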

src/global.d.ts vendored
View File

@ -8,12 +8,8 @@ interface Window {
};
electron: {
getScreenshot: () => Promise<string>;
}
api: {
send: (channel: any, data: any) => void,
receive: (channel: any, func: any) => void
},
};
settingsWindow: {
close: () => void,
}
close: () => void;
};
}

View File

@ -2,9 +2,39 @@ import { HashRouter, Routes, Route } from "react-router-dom";
import SettingsPage from "pages/settings";
import "./i18n.ts";
import RewindPage from "pages/rewind";
import './app.css';
import "./app.css";
import { useEffect } from "react";
import { useAtom } from "jotai";
import { apiInfoAtom } from "./state/apiInfo.ts";
declare global {
interface Window {
appGlobal: {
requestApiInfo: () => Promise<{ port: number; apiKey: string }>;
};
}
}
export function App() {
const [apiInfo, setApiInfo] = useAtom(apiInfoAtom);
useEffect(() => {
const fetchApiInfo = async () => {
try {
const info = await window.appGlobal.requestApiInfo();
setApiInfo(info);
} catch (error) {
console.error("Failed to fetch API info:", error);
}
};
fetchApiInfo();
}, [setApiInfo]);
if (!apiInfo) {
return null;
}
return (
<div className="w-screen h-screen">
<HashRouter>

View File

@ -38,7 +38,7 @@ i18n.use(initReactI18next) // passes i18n down to react-i18next
},
ko: {
translation: ko
},
}
},
fallbackLng: "en",

View File

@ -1,11 +1,11 @@
<!doctype html>
<html lang="en">
<head>
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
</head>
<body>
<div id="root"></div>
<script type="module" src="./main.tsx"></script>
</body>
</head>
<body>
<div id="root"></div>
<script type="module" src="./main.tsx"></script>
</body>
</html>

View File

@ -0,0 +1,6 @@
import { atom } from 'jotai'
export const apiInfoAtom = atom({
port: -1,
apiKey: ''
})

View File

@ -4,7 +4,7 @@ const config: Config = {
content: [
"./pages/**/*.{js,ts,jsx,tsx,mdx}",
"./components/**/*.{js,ts,jsx,tsx,mdx}",
"./src/**/*.{js,ts,jsx,tsx,mdx}",
"./src/**/*.{js,ts,jsx,tsx,mdx}"
],
theme: {
extend: {

View File

@ -31,5 +31,5 @@
"src/global.d.ts",
"pages/**/*.tsx",
"components/**/*.tsx"
],
]
}

View File

@ -2,7 +2,7 @@
"compilerOptions": {
"target": "ESNext",
"module": "ESNext",
"moduleResolution": "node",
"moduleResolution": "bundler",
"outDir": "./dist/electron",
"rootDir": "./src/electron",
"strict": true,
@ -11,8 +11,5 @@
"forceConsistentCasingInFileNames": true
},
"include": ["src/electron"],
"references": [
{ "path": "./tsconfig.app.json"},
{ "path": "./tsconfig.node.json"}
]
"references": [{ "path": "./tsconfig.app.json" }, { "path": "./tsconfig.node.json" }]
}

View File

@ -19,7 +19,5 @@
"noFallthroughCasesInSwitch": true,
"noUncheckedSideEffectImports": true
},
"include": [
"vite.config.ts"
]
"include": ["vite.config.ts"]
}

View File

@ -19,10 +19,10 @@ export default defineConfig({
customChunk: (args) => {
// files in the pages directory are exported as single files
const { id } = args;
if (id.includes('node_modules')) {
return 'vendor';
if (id.includes("node_modules")) {
return "vendor";
} else {
return 'main';
return "main";
}
}
})