Compare commits
No commits in common. "main" and "bc1781e7b5da7bf7a48926085893cf9d12364a79" have entirely different histories.
main
...
bc1781e7b5
50 changed files with 2776 additions and 5022 deletions
10
.gitignore
vendored
10
.gitignore
vendored
|
@ -3,13 +3,3 @@ stock/**/*
|
|||
__pycache__/**/*
|
||||
dist/**/*
|
||||
stock.db
|
||||
|
||||
# dotenv environment variable files
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# Fresh build directory
|
||||
_fresh/
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
# Stock
|
||||
|
||||
[](https://fresh.deno.dev)
|
||||
|
||||
주식 데이터 수집 및 선별하는 파이썬 코드입니다.
|
||||
|
|
24
app.py
Normal file
24
app.py
Normal file
|
@ -0,0 +1,24 @@
|
|||
import flask
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser(description="Stock web server")
|
||||
parser.add_argument("--port", type=int, default=12001, help="port number")
|
||||
parser.add_argument("--host", type=str, default="0.0.0.0", help="host address")
|
||||
parser.add_argument("--debug", action="store_true", help="debug mode")
|
||||
|
||||
app = flask.Flask(__name__)
|
||||
|
||||
@app.route("/dist/<m>")
|
||||
def distServe(m:str):
|
||||
return flask.send_from_directory("dist", m)
|
||||
|
||||
@app.route("/")
|
||||
def index():
|
||||
import pages
|
||||
return flask.render_template("index.html", pages = pages.GenLists)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
args = parser.parse_args()
|
||||
app.run(host=args.host, port=args.port, debug=args.debug)
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
import { JSX } from "preact";
|
||||
import { IS_BROWSER } from "$fresh/runtime.ts";
|
||||
|
||||
export function Button(props: JSX.HTMLAttributes<HTMLButtonElement>) {
|
||||
return (
|
||||
<button
|
||||
{...props}
|
||||
disabled={!IS_BROWSER || props.disabled}
|
||||
class="px-2 py-1 border-gray-500 border-2 rounded bg-white hover:bg-gray-200 transition-colors"
|
||||
/>
|
||||
);
|
||||
}
|
2
db.py
2
db.py
|
@ -109,7 +109,7 @@ def update_krx(nday:int = 90):
|
|||
ClosingMonth = ?,
|
||||
Representative = ?,
|
||||
Homepage = ?,
|
||||
AddressArea = ?
|
||||
AddressArea = ?,
|
||||
WHERE Code = ?;
|
||||
""",(row["name"],row["업종"],row["주요제품"],row["상장일"],row["결산월"],row["대표자명"],row["홈페이지"],row["지역"],code
|
||||
))
|
||||
|
|
11
db/db.ts
11
db/db.ts
|
@ -1,11 +0,0 @@
|
|||
import { Kysely, ParseJSONResultsPlugin } from "kysely";
|
||||
import { DB as Sqlite } from "sqlite";
|
||||
import { DenoSqliteDialect } from "./deno-sqlite-dialect.ts";
|
||||
import { Database } from "./type.ts";
|
||||
|
||||
export const db = new Kysely<Database>({
|
||||
dialect: new DenoSqliteDialect({
|
||||
database: new Sqlite("stock.db")
|
||||
}),
|
||||
plugins: [new ParseJSONResultsPlugin()]
|
||||
});
|
|
@ -1,27 +0,0 @@
|
|||
/// The MIT License (MIT)
|
||||
/// Copyright (c) 2023 Alex Gleason
|
||||
/// Copyright (c) 2022 Sami Koskimäki
|
||||
/// https://gitlab.com/soapbox-pub/kysely-deno-sqlite
|
||||
|
||||
import type { SqliteDialectConfig } from 'kysely';
|
||||
|
||||
/** Type compatible with both [dyedgreen/deno-sqlite](https://github.com/dyedgreen/deno-sqlite) and [denodrivers/sqlite3](https://github.com/denodrivers/sqlite3). */
|
||||
type DenoSqlite =
|
||||
& {
|
||||
close(): void;
|
||||
changes: number;
|
||||
lastInsertRowId: number;
|
||||
}
|
||||
& ({
|
||||
queryEntries(sql: string, params: any): unknown[];
|
||||
} | {
|
||||
prepare(sql: string): {
|
||||
all(...params: any): unknown[];
|
||||
};
|
||||
});
|
||||
|
||||
interface DenoSqliteDialectConfig extends Omit<SqliteDialectConfig, 'database'> {
|
||||
database: DenoSqlite | (() => Promise<DenoSqlite>);
|
||||
}
|
||||
|
||||
export type { DenoSqlite, DenoSqliteDialectConfig };
|
|
@ -1,49 +0,0 @@
|
|||
/// The MIT License (MIT)
|
||||
/// Copyright (c) 2023 Alex Gleason
|
||||
/// Copyright (c) 2022 Sami Koskimäki
|
||||
/// https://gitlab.com/soapbox-pub/kysely-deno-sqlite
|
||||
|
||||
import {
|
||||
type DatabaseIntrospector,
|
||||
type Dialect,
|
||||
type DialectAdapter,
|
||||
type Driver,
|
||||
Kysely,
|
||||
type QueryCompiler,
|
||||
SqliteAdapter,
|
||||
SqliteIntrospector,
|
||||
SqliteQueryCompiler,
|
||||
} from 'kysely';
|
||||
|
||||
import { DenoSqliteDriver } from './kysely-sqlite-driver.ts';
|
||||
|
||||
import type { DenoSqliteDialectConfig } from './deno-sqlite-dialect-config.ts';
|
||||
|
||||
class DenoSqliteDialect implements Dialect {
|
||||
readonly #config: DenoSqliteDialectConfig;
|
||||
|
||||
constructor(config: DenoSqliteDialectConfig) {
|
||||
this.#config = Object.freeze({ ...config });
|
||||
}
|
||||
|
||||
createDriver(): Driver {
|
||||
return new DenoSqliteDriver(this.#config);
|
||||
}
|
||||
|
||||
createQueryCompiler(): QueryCompiler {
|
||||
return new SqliteQueryCompiler();
|
||||
}
|
||||
|
||||
createAdapter(): DialectAdapter {
|
||||
return new SqliteAdapter();
|
||||
}
|
||||
|
||||
// deno-lint-ignore no-explicit-any
|
||||
createIntrospector(db: Kysely<any>): DatabaseIntrospector {
|
||||
return new SqliteIntrospector(db);
|
||||
}
|
||||
}
|
||||
|
||||
export { DenoSqliteDialect };
|
||||
|
||||
|
|
@ -1,112 +0,0 @@
|
|||
/// The MIT License (MIT)
|
||||
/// Copyright (c) 2023 Alex Gleason
|
||||
/// Copyright (c) 2022 Sami Koskimäki
|
||||
/// https://gitlab.com/soapbox-pub/kysely-deno-sqlite
|
||||
|
||||
import { CompiledQuery, type DatabaseConnection, type Driver, type QueryResult } from 'kysely';
|
||||
|
||||
import type { DenoSqlite, DenoSqliteDialectConfig } from './deno-sqlite-dialect-config.ts';
|
||||
|
||||
class DenoSqliteDriver implements Driver {
|
||||
readonly #config: DenoSqliteDialectConfig;
|
||||
readonly #connectionMutex = new ConnectionMutex();
|
||||
|
||||
#db?: DenoSqlite;
|
||||
#connection?: DatabaseConnection;
|
||||
|
||||
constructor(config: DenoSqliteDialectConfig) {
|
||||
this.#config = Object.freeze({ ...config });
|
||||
}
|
||||
|
||||
async init(): Promise<void> {
|
||||
this.#db = typeof this.#config.database === 'function' ? await this.#config.database() : this.#config.database;
|
||||
|
||||
this.#connection = new DenoSqliteConnection(this.#db);
|
||||
|
||||
if (this.#config.onCreateConnection) {
|
||||
await this.#config.onCreateConnection(this.#connection);
|
||||
}
|
||||
}
|
||||
|
||||
async acquireConnection(): Promise<DatabaseConnection> {
|
||||
// SQLite only has one single connection. We use a mutex here to wait
|
||||
// until the single connection has been released.
|
||||
await this.#connectionMutex.lock();
|
||||
return this.#connection!;
|
||||
}
|
||||
|
||||
async beginTransaction(connection: DatabaseConnection): Promise<void> {
|
||||
await connection.executeQuery(CompiledQuery.raw('begin'));
|
||||
}
|
||||
|
||||
async commitTransaction(connection: DatabaseConnection): Promise<void> {
|
||||
await connection.executeQuery(CompiledQuery.raw('commit'));
|
||||
}
|
||||
|
||||
async rollbackTransaction(connection: DatabaseConnection): Promise<void> {
|
||||
await connection.executeQuery(CompiledQuery.raw('rollback'));
|
||||
}
|
||||
|
||||
// deno-lint-ignore require-await
|
||||
async releaseConnection(): Promise<void> {
|
||||
this.#connectionMutex.unlock();
|
||||
}
|
||||
|
||||
// deno-lint-ignore require-await
|
||||
async destroy(): Promise<void> {
|
||||
this.#db?.close();
|
||||
}
|
||||
}
|
||||
|
||||
class DenoSqliteConnection implements DatabaseConnection {
|
||||
readonly #db: DenoSqlite;
|
||||
|
||||
constructor(db: DenoSqlite) {
|
||||
this.#db = db;
|
||||
}
|
||||
|
||||
executeQuery<O>({ sql, parameters }: CompiledQuery): Promise<QueryResult<O>> {
|
||||
const rows = 'queryEntries' in this.#db
|
||||
? this.#db.queryEntries(sql, parameters)
|
||||
: this.#db.prepare(sql).all(...parameters);
|
||||
|
||||
const { changes, lastInsertRowId } = this.#db;
|
||||
|
||||
return Promise.resolve({
|
||||
rows: rows as O[],
|
||||
numAffectedRows: BigInt(changes),
|
||||
insertId: BigInt(lastInsertRowId),
|
||||
});
|
||||
}
|
||||
|
||||
// deno-lint-ignore require-yield
|
||||
async *streamQuery<R>(): AsyncIterableIterator<QueryResult<R>> {
|
||||
throw new Error('Sqlite driver doesn\'t support streaming');
|
||||
}
|
||||
}
|
||||
|
||||
class ConnectionMutex {
|
||||
#promise?: Promise<void>;
|
||||
#resolve?: () => void;
|
||||
|
||||
async lock(): Promise<void> {
|
||||
while (this.#promise) {
|
||||
await this.#promise;
|
||||
}
|
||||
|
||||
this.#promise = new Promise((resolve) => {
|
||||
this.#resolve = resolve;
|
||||
});
|
||||
}
|
||||
|
||||
unlock(): void {
|
||||
const resolve = this.#resolve;
|
||||
|
||||
this.#promise = undefined;
|
||||
this.#resolve = undefined;
|
||||
|
||||
resolve?.();
|
||||
}
|
||||
}
|
||||
|
||||
export { DenoSqliteDriver };
|
64
db/type.ts
64
db/type.ts
|
@ -1,64 +0,0 @@
|
|||
import { ColumnType, Generated, Insertable, Selectable, Updateable } from "kysely";
|
||||
|
||||
/**
|
||||
* "Code" TEXT,
|
||||
"Date" TEXT,
|
||||
"Close" INTEGER NOT NULL,
|
||||
"Diff" INTEGER NOT NULL,
|
||||
"Open" INTEGER NOT NULL,
|
||||
"High" INTEGER NOT NULL,
|
||||
"Low" INTEGER NOT NULL,
|
||||
"Volume" INTEGER NOT NULL,
|
||||
*/
|
||||
|
||||
export interface StockTable {
|
||||
Code: string;
|
||||
Date: string;
|
||||
Close: number;
|
||||
Diff: number;
|
||||
Open: number;
|
||||
High: number;
|
||||
Low: number;
|
||||
Volume: number;
|
||||
}
|
||||
|
||||
export interface KRXCorpTable{
|
||||
Name: string;
|
||||
/**
|
||||
* PK
|
||||
*/
|
||||
Code: string;
|
||||
|
||||
Sector: string;
|
||||
Product: string;
|
||||
ListingDay: string;
|
||||
ClosingMonth: string;
|
||||
Representative: string;
|
||||
Homepage: string;
|
||||
AddressArea: string;
|
||||
LastUpdate: string;
|
||||
}
|
||||
|
||||
export interface KOSPITable{
|
||||
Name: string;
|
||||
/**
|
||||
* PK
|
||||
*/
|
||||
Code: string;
|
||||
}
|
||||
|
||||
export interface KOSDAQTable{
|
||||
Name: string;
|
||||
/**
|
||||
* PK
|
||||
*/
|
||||
Code: string;
|
||||
}
|
||||
|
||||
|
||||
export interface Database {
|
||||
stock: StockTable;
|
||||
KRXCorp: KRXCorpTable;
|
||||
KOSPI: KOSPITable;
|
||||
KOSDAQ: KOSDAQTable;
|
||||
}
|
29
deno.json
29
deno.json
|
@ -1,29 +0,0 @@
|
|||
{
|
||||
"lock": false,
|
||||
"tasks": {
|
||||
"start": "deno run -A --watch=static/,routes/ dev.ts",
|
||||
"prod_start": "deno run -A main.ts",
|
||||
"update": "deno run -A -r https://fresh.deno.dev/update .",
|
||||
"build": "deno run -A dev.ts build",
|
||||
"preview": "deno run -A main.ts"
|
||||
},
|
||||
"lint": { "rules": { "tags": ["fresh", "recommended"] } },
|
||||
"imports": {
|
||||
"$fresh/": "https://deno.land/x/fresh@1.5.2/",
|
||||
"preact": "https://esm.sh/preact@10.18.1",
|
||||
"preact/": "https://esm.sh/preact@10.18.1/",
|
||||
"preact-render-to-string": "https://esm.sh/*preact-render-to-string@6.2.2",
|
||||
"@preact/signals": "https://esm.sh/*@preact/signals@1.2.1",
|
||||
"@preact/signals-core": "https://esm.sh/*@preact/signals-core@1.5.0",
|
||||
"auto-animate": "https://esm.sh/@formkit/auto-animate@0.7.0",
|
||||
"auto-animate/": "https://esm.sh/@formkit/auto-animate@0.7.0/",
|
||||
"twind": "https://esm.sh/twind@0.16.19",
|
||||
"twind/": "https://esm.sh/twind@0.16.19/",
|
||||
"$std/": "https://deno.land/std@0.203.0/",
|
||||
"kysely": "npm:kysely@^0.26.3",
|
||||
"kysely/helpers/sqlite": "npm:kysely@^0.26.3/helpers/sqlite",
|
||||
"sqlite": "https://deno.land/x/sqlite@v3.8/mod.ts"
|
||||
},
|
||||
"compilerOptions": { "jsx": "react-jsx", "jsxImportSource": "preact" },
|
||||
"exclude": ["**/_fresh/*"]
|
||||
}
|
5
dev.ts
5
dev.ts
|
@ -1,5 +0,0 @@
|
|||
#!/usr/bin/env -S deno run -A --watch=static/,routes/
|
||||
|
||||
import dev from "$fresh/dev.ts";
|
||||
|
||||
await dev(import.meta.url, "./main.ts");
|
44
fresh.gen.ts
44
fresh.gen.ts
|
@ -1,44 +0,0 @@
|
|||
// DO NOT EDIT. This file is generated by Fresh.
|
||||
// This file SHOULD be checked into source version control.
|
||||
// This file is automatically updated during development when running `dev.ts`.
|
||||
|
||||
import * as $0 from "./routes/_404.tsx";
|
||||
import * as $1 from "./routes/_app.tsx";
|
||||
import * as $2 from "./routes/api/corps/[index].ts";
|
||||
import * as $3 from "./routes/api/corps/index.ts";
|
||||
import * as $4 from "./routes/api/joke.ts";
|
||||
import * as $5 from "./routes/api/kosdaq.ts";
|
||||
import * as $6 from "./routes/api/kospi.ts";
|
||||
import * as $7 from "./routes/api/pages/[name].ts";
|
||||
import * as $8 from "./routes/api/pages/index.ts";
|
||||
import * as $9 from "./routes/greet/[name].tsx";
|
||||
import * as $10 from "./routes/index.tsx";
|
||||
import * as $11 from "./routes/pages/[name].tsx";
|
||||
import * as $$0 from "./islands/Counter.tsx";
|
||||
import * as $$1 from "./islands/Search.tsx";
|
||||
import * as $$2 from "./islands/StockList.tsx";
|
||||
|
||||
const manifest = {
|
||||
routes: {
|
||||
"./routes/_404.tsx": $0,
|
||||
"./routes/_app.tsx": $1,
|
||||
"./routes/api/corps/[index].ts": $2,
|
||||
"./routes/api/corps/index.ts": $3,
|
||||
"./routes/api/joke.ts": $4,
|
||||
"./routes/api/kosdaq.ts": $5,
|
||||
"./routes/api/kospi.ts": $6,
|
||||
"./routes/api/pages/[name].ts": $7,
|
||||
"./routes/api/pages/index.ts": $8,
|
||||
"./routes/greet/[name].tsx": $9,
|
||||
"./routes/index.tsx": $10,
|
||||
"./routes/pages/[name].tsx": $11,
|
||||
},
|
||||
islands: {
|
||||
"./islands/Counter.tsx": $$0,
|
||||
"./islands/Search.tsx": $$1,
|
||||
"./islands/StockList.tsx": $$2,
|
||||
},
|
||||
baseUrl: import.meta.url,
|
||||
};
|
||||
|
||||
export default manifest;
|
245
gen.py
245
gen.py
|
@ -5,6 +5,7 @@ import sqlite3
|
|||
from typing import Dict, List
|
||||
from render import *
|
||||
import db as database
|
||||
from jinja2 import Environment, PackageLoader, select_autoescape
|
||||
import pandas as pd
|
||||
import tqdm
|
||||
|
||||
|
@ -16,19 +17,6 @@ class DataStore:
|
|||
def getAllKRXCorp(self) -> List[database.KRXCorp]:
|
||||
return database.GetAllKRXCorp(self.db)
|
||||
|
||||
def _getCorpsInCorpGroup(self, table_name: str) -> List[database.KRXCorp]:
|
||||
cursor = self.db.execute(f"select c.* from {table_name} as k INNER JOIN KRXCorp as c on k.Name = c.Name")
|
||||
return [database.KRXCorp.from_db(r) for r in cursor]
|
||||
|
||||
def getKosdaq(self) -> List[database.KRXCorp]:
|
||||
return self._getCorpsInCorpGroup("KOSDAQ")
|
||||
|
||||
def getKospi(self) -> List[database.KRXCorp]:
|
||||
return self._getCorpsInCorpGroup("KOSPI")
|
||||
|
||||
def getKosdaqAndKospi(self) -> List[database.KRXCorp]:
|
||||
return self.getKospi() + self.getKosdaq()
|
||||
|
||||
def getStockPrice(self,code,length) -> pd.DataFrame:
|
||||
if code in self.pricesCache and len(self.pricesCache[code]) >= length:
|
||||
return self.pricesCache[code]
|
||||
|
@ -87,182 +75,77 @@ def isVolumeMulPriceGreaterThan(stock: pd.DataFrame, threshold: int, nday: int)
|
|||
return stock.iloc[nday]['VOLUME'] * stock.iloc[nday]['CLOSE'] > threshold
|
||||
|
||||
def isMACDCrossSignal(signal: pd.Series, macd: pd.Series, nday: int, order=1) -> bool:
|
||||
return (signal.iloc[nday] > macd.iloc[nday] and
|
||||
signal.iloc[nday+order] < macd.iloc[nday+order])
|
||||
return (signal.iloc[nday] < macd.iloc[nday] and
|
||||
signal.iloc[nday+order] > macd.iloc[nday+order])
|
||||
|
||||
def isRelativeDiffLessThan(a:pd.Series,b:pd.Series, threshold: float,nday:int) -> bool:
|
||||
return abs(a.iloc[nday] - b.iloc[nday + 1]) / b.iloc[nday + 1] < threshold
|
||||
return (a.iloc[nday] - b.iloc[nday]) / b.iloc[nday] < threshold
|
||||
|
||||
def isDiffGreaterThan(a:pd.Series,b:pd.Series, nday:int) -> bool:
|
||||
"""a is bigger than b"""
|
||||
return (a.iloc[nday] > b.iloc[nday])
|
||||
|
||||
def calc_rsi(price: pd.Series, period: int = 14):
|
||||
delta = price.diff()
|
||||
up, down = delta.copy(), delta.copy()
|
||||
up[up < 0] = 0
|
||||
down[down > 0] = 0
|
||||
roll_up1 = up.rolling(period).mean()
|
||||
roll_down1 = down.abs().rolling(period).mean()
|
||||
RS1 = roll_up1 / roll_down1
|
||||
return pd.Series(100 - (100 / (1 + RS1)), name='RSI')
|
||||
|
||||
def prepareCollector(collector: OutputCollector) -> None:
|
||||
import pages
|
||||
for item in pages.GenLists:
|
||||
collector.addResult(item["name"], item["description"])
|
||||
|
||||
def every(f, xs):
|
||||
for x in xs:
|
||||
if not f(x):
|
||||
return False
|
||||
return True
|
||||
|
||||
def collect(data: DataStore, collector: OutputCollector, corp: database.KRXCorp
|
||||
, ndays: List[int]) -> None:
|
||||
stock = data.getStockPrice(corp.Code,250)
|
||||
if len(stock) <= 245:
|
||||
, nday: int) -> None:
|
||||
stock = data.getStockPrice(corp.Code,70)
|
||||
if len(stock) < 70:
|
||||
return
|
||||
for nday in ndays:
|
||||
if (stock.iloc[nday]['VOLUME'] <= 0):
|
||||
return
|
||||
|
||||
close = stock["CLOSE"]
|
||||
openv = stock["OPEN"]
|
||||
#high = stock["HIGH"]
|
||||
#low = stock["LOW"]
|
||||
#d3 = close.loc[::-1].rolling(window=3
|
||||
# ).mean().dropna().loc[::-1]
|
||||
fetch_len = len(ndays) + 10
|
||||
def d(n):
|
||||
return close.iloc[:(n+fetch_len)].loc[::-1].rolling(window=n
|
||||
d5 = stock["CLOSE"].loc[::-1].rolling(window=5
|
||||
).mean().dropna().loc[::-1]
|
||||
d20 = stock["CLOSE"].loc[::-1].rolling(window=20
|
||||
).mean().dropna().loc[::-1]
|
||||
d60 = stock["CLOSE"].loc[::-1].rolling(window=60
|
||||
).mean().dropna().loc[::-1]
|
||||
|
||||
def d_std(n):
|
||||
return close.iloc[:(n+fetch_len)].loc[::-1].rolling(window=n
|
||||
).std().dropna().loc[::-1]
|
||||
d5 = d(5)
|
||||
d20 = d(20)
|
||||
d25 = d(25)
|
||||
d45 = d(45)
|
||||
d60 = d(60)
|
||||
d120 = d(120)
|
||||
d240 = d(240)
|
||||
if (isRelativeDiffLessThan(d5, d20, 0.01, nday) and
|
||||
isRelativeDiffLessThan(d5, d60, 0.01, nday)):
|
||||
collector.collect("cross 2", corp, stock.index[nday])
|
||||
if (isVolumeNTimes(stock, 3, 0) and
|
||||
isVolumeMulPriceGreaterThan(stock, 100000, nday)):
|
||||
collector.collect("cross 3", corp, stock.index[nday])
|
||||
|
||||
# 표준편차
|
||||
d_std25 = d_std(25)
|
||||
|
||||
bollinger_upperband = d25 + 2* d_std25
|
||||
|
||||
a = [d5, d20, d45]
|
||||
for nday in ndays:
|
||||
if openv[nday] <= d240[nday] and d240[nday] <= close[nday] and d240[nday + 1] < d240[nday]:
|
||||
collector.collect("양봉사이240일선증가", corp, stock.index[nday])
|
||||
|
||||
if d5[nday + 1] < d5[nday] and d5[nday + 2] > d5[nday + 1] and d20[nday + 1] < d20[nday]:
|
||||
collector.collect("5일선반등120선증가", corp, stock.index[nday])
|
||||
|
||||
if openv[nday] <= d20[nday] and d20[nday] <= close[nday]:
|
||||
collector.collect("양봉사이20일선", corp, stock.index[nday])
|
||||
|
||||
if bollinger_upperband[nday] <= close[nday]:
|
||||
collector.collect("볼린저 밴드 25", corp, stock.index[nday])
|
||||
|
||||
if every(lambda i: isRelativeDiffLessThan(i,close,0.05,nday), a):
|
||||
collector.collect("뭉침", corp, stock.index[nday])
|
||||
if d120[nday + 1] < d120[nday]:
|
||||
collector.collect("뭉침5% 120선 상승", corp, stock.index[nday])
|
||||
|
||||
if every(lambda i: isRelativeDiffLessThan(i,close,0.01,nday), a):
|
||||
collector.collect("뭉침01", corp, stock.index[nday])
|
||||
if d120[nday + 1] < d120[nday]:
|
||||
collector.collect("뭉침1% 120선 상승", corp, stock.index[nday])
|
||||
|
||||
if every(lambda i: isRelativeDiffLessThan(i,close,0.03,nday), a):
|
||||
collector.collect("뭉침03", corp, stock.index[nday])
|
||||
if d120[nday + 1] < d120[nday]:
|
||||
collector.collect("뭉침3% 120선 상승", corp, stock.index[nday])
|
||||
|
||||
if (d5[nday] > d20[nday] and d5[nday + 1] < d20[nday + 1]):
|
||||
collector.collect("cross d20 and d5", corp, stock.index[nday])
|
||||
|
||||
if (isDiffGreaterThan(d5, d20, nday)):
|
||||
collector.collect("d20d5", corp, stock.index[nday])
|
||||
if (isVolumeNTimes(stock, 5, nday)):
|
||||
collector.collect("d20d5VolumeX5", corp, stock.index[nday])
|
||||
|
||||
if (isVolumeNTimes(stock, 3, nday)):
|
||||
collector.collect("volume", corp, stock.index[nday])
|
||||
|
||||
if (isVolumeMulPriceGreaterThan(stock, 50000000, nday)):
|
||||
collector.collect("volume5", corp, stock.index[nday])
|
||||
if (isRelativeDiffLessThan(d20, d60, 0.01, nday) and
|
||||
isVolumeMulPriceGreaterThan(stock, 1000000, nday)):
|
||||
collector.collect("cross 4", corp, stock.index[nday])
|
||||
|
||||
if (isDiffGreaterThan(d5, d20, nday)):
|
||||
collector.collect("d20d5", corp, stock.index[nday])
|
||||
if (isVolumeNTimes(stock, 5, nday)):
|
||||
collector.collect("volumeX5", corp, stock.index[nday])
|
||||
D240BiggerThanYesterDay = d240[nday + 1] <= d240[nday]
|
||||
D240Bounce = d240[nday + 2] >= d240[nday + 1] and D240BiggerThanYesterDay
|
||||
D120Bounce = d120[nday + 2] >= d120[nday + 1] and d120[nday + 1] <= d120[nday]
|
||||
# D240Cross = low[nday] <= d240[nday] and d240[nday] <= high[nday]
|
||||
if (D240Bounce):
|
||||
collector.collect("240일선 반등",corp,stock.index[nday])
|
||||
if (D120Bounce):
|
||||
collector.collect("120일선 반등",corp,stock.index[nday])
|
||||
collector.collect("d20d5VolumeX5", corp, stock.index[nday])
|
||||
|
||||
if (D240BiggerThanYesterDay):
|
||||
collector.collect("240일 증가", corp, stock.index[nday])
|
||||
if (isRelativeDiffLessThan(d5, d20, 0.03, nday) and
|
||||
isRelativeDiffLessThan(d5, d60, 0.03, nday) and
|
||||
isVolumeNTimes(stock, 3, nday)):
|
||||
collector.collect("DiffDistance", corp, stock.index[nday])
|
||||
|
||||
if max([d[nday] for d in (d20, d60, d120)]) < min(close[nday], openv[nday]):
|
||||
collector.collect("떠있음", corp, stock.index[nday])
|
||||
if (isVolumeNTimes(stock, 3, nday)):
|
||||
collector.collect("volume", corp, stock.index[nday])
|
||||
|
||||
if (d60[nday + 1] < d60[nday]):
|
||||
collector.collect("정배열60", corp, stock.index[nday])
|
||||
if d5[nday + 2] >= d5[nday + 1] and d5[nday + 1] <= d5[nday]:
|
||||
collector.collect("60일 5일선 반등", corp, stock.index[nday])
|
||||
if (isVolumeMulPriceGreaterThan(stock, 50000000, nday)):
|
||||
collector.collect("volume5", corp, stock.index[nday])
|
||||
|
||||
if (d20[nday + 1] < d20[nday]):
|
||||
collector.collect("정배열20", corp, stock.index[nday])
|
||||
if (D240BiggerThanYesterDay):
|
||||
collector.collect("정배열240", corp, stock.index[nday])
|
||||
if(d5[nday + 1] <= d5[nday] and
|
||||
d120[nday + 1] <= d120[nday]):
|
||||
collector.collect("모두 정배열", corp, stock.index[nday])
|
||||
if (isVolumeNTimes(stock, 5, nday)):
|
||||
collector.collect("volumeX5", corp, stock.index[nday])
|
||||
|
||||
if(d120[nday + 1] <= d120[nday] and
|
||||
d120[nday + 1] < d240[nday] and
|
||||
d120[nday] >= d240[nday]):
|
||||
collector.collect("120선240선추월", corp, stock.index[nday])
|
||||
ewm12 = stock["CLOSE"].loc[::-1].ewm(span=12).mean().loc[::-1]
|
||||
ewm26 = stock["CLOSE"].loc[::-1].ewm(span=26).mean().loc[::-1]
|
||||
macd = (ewm12 - ewm26)
|
||||
signal = macd.ewm(span=9).mean()
|
||||
|
||||
if (d5[nday + 1] < d20[nday + 1] and d20[nday] < d5[nday]):
|
||||
collector.collect("d20d5돌파", corp, stock.index[nday])
|
||||
|
||||
ewm5 = close.loc[::-1].ewm(span=5).mean().loc[::-1]
|
||||
ewm10 = close.loc[::-1].ewm(span=10).mean().loc[::-1]
|
||||
macd = (ewm5 - ewm10)
|
||||
signal = macd.loc[::-1].ewm(span=4).mean().loc[::-1]
|
||||
|
||||
rsi = calc_rsi(close.loc[::-1],14).dropna().loc[::-1]
|
||||
rsi.reset_index(drop = True, inplace = True)
|
||||
|
||||
for nday in ndays:
|
||||
if (isMACDCrossSignal(macd, signal, nday)):
|
||||
collector.collect("macd", corp, stock.index[nday])
|
||||
|
||||
if (d45[2+nday] > d45[1+nday] and d45[1+nday] < d45[nday]):
|
||||
collector.collect("45일선 반등",corp, stock.index[nday])
|
||||
|
||||
if(d20[2+nday] > d20[1+nday] and d20[1+nday] < d20[nday]):
|
||||
collector.collect("20일선 반등",corp, stock.index[nday])
|
||||
|
||||
for nday in ndays:
|
||||
if(rsi[nday] < 30):
|
||||
collector.collect("RSI 30 이하", corp, stock.index[nday])
|
||||
|
||||
#rsi_signal = macd.loc[::-1].ewm(span=7).mean().loc[::-1]
|
||||
if (isMACDCrossSignal(macd, signal, nday)):
|
||||
collector.collect("macd", corp, stock.index[nday])
|
||||
|
||||
parser = argparse.ArgumentParser(description="주식 검색 정보를 출력합니다.")
|
||||
parser.add_argument("--format", "-f", choices=["json", "html"], default="html",
|
||||
help="출력 포맷을 지정합니다. 기본값은 html입니다.")
|
||||
parser.add_argument("--dir", "-d", default=".", help="출력할 폴더를 지정합니다.")
|
||||
parser.add_argument("--corp", "-c", help="주식 코드를 지정합니다. 지정하지 않으면 kosdaq과 kospi만 검색합니다.")
|
||||
parser.add_argument("--fullSearch", help="모든 주식을 검색합니다.", action='store_true')
|
||||
parser.add_argument("--corp", "-c", help="주식 코드를 지정합니다. 지정하지 않으면 모든 주식을 검색합니다.")
|
||||
parser.add_argument("--printStdout", action="store_true", help="출력한 결과를 표준 출력으로 출력합니다.")
|
||||
parser.add_argument("--version", "-v", action="version", version="%(prog)s 1.0")
|
||||
parser.add_argument("--verbose", "-V", action="store_true", help="출력할 내용을 자세히 표시합니다.")
|
||||
|
@ -270,27 +153,43 @@ parser.add_argument("--verbose", "-V", action="store_true", help="출력할 내
|
|||
if __name__ == "__main__":
|
||||
args = parser.parse_args()
|
||||
dataStore = DataStore()
|
||||
if args.fullSearch:
|
||||
krx_corps = dataStore.getAllKRXCorp()
|
||||
else:
|
||||
krx_corps = dataStore.getKosdaqAndKospi()
|
||||
|
||||
krx_corps = dataStore.getAllKRXCorp()
|
||||
if args.corp:
|
||||
krx_corps = [corp for corp in krx_corps if corp.Code == args.corp]
|
||||
|
||||
env = Environment(
|
||||
loader=PackageLoader('render', 'templates'),
|
||||
autoescape=select_autoescape(['html', 'xml'])
|
||||
)
|
||||
collector = OutputCollector()
|
||||
prepareCollector(collector)
|
||||
|
||||
for corp in tqdm.tqdm(krx_corps):
|
||||
ndays = [nday for nday in range(0, 5)]
|
||||
collect(dataStore, collector, corp, ndays)
|
||||
for nday in range(0, 5):
|
||||
collect(dataStore, collector, corp, nday)
|
||||
dataStore.clearCache()
|
||||
|
||||
for k,v in collector.data.items():
|
||||
data = json.dumps(v.toDict(), ensure_ascii=False)
|
||||
if args.printStdout:
|
||||
print(k)
|
||||
print(data)
|
||||
if args.format == "json":
|
||||
data = json.dumps(v.toDict(), indent=4, ensure_ascii=False)
|
||||
if args.printStdout:
|
||||
print(k)
|
||||
print(data)
|
||||
else:
|
||||
with open(os.path.join(args.dir, k + ".json", encoding="UTF-8"), "w") as f:
|
||||
f.write(data)
|
||||
else:
|
||||
with open(os.path.join(args.dir, k + ".json"), "w", encoding="UTF-8") as f:
|
||||
f.write(data)
|
||||
template = env.get_template("Lists.html")
|
||||
|
||||
days = v.corpListByDate.keys()
|
||||
days = list(days)
|
||||
days.append(datetime.date.today().isoformat())
|
||||
days.sort(reverse=True)
|
||||
days = days[:5]
|
||||
|
||||
html = template.render(collected=v, title=k, days=days)
|
||||
if args.printStdout:
|
||||
print(html)
|
||||
else:
|
||||
with open(os.path.join(args.dir, k + ".html"), "w", encoding="UTF-8") as f:
|
||||
f.write(html)
|
|
@ -1,16 +0,0 @@
|
|||
import type { Signal } from "@preact/signals";
|
||||
import { Button } from "../components/Button.tsx";
|
||||
|
||||
interface CounterProps {
|
||||
count: Signal<number>;
|
||||
}
|
||||
|
||||
export default function Counter(props: CounterProps) {
|
||||
return (
|
||||
<div class="flex gap-8 py-6">
|
||||
<Button onClick={() => props.count.value -= 1}>-1</Button>
|
||||
<p class="text-3xl">{props.count}</p>
|
||||
<Button onClick={() => props.count.value += 1}>+1</Button>
|
||||
</div>
|
||||
);
|
||||
}
|
|
@ -1,5 +0,0 @@
|
|||
export default function Search(){
|
||||
return <div>
|
||||
<div>div</div>
|
||||
</div>
|
||||
}
|
|
@ -1,229 +0,0 @@
|
|||
import { Button } from "../components/Button.tsx";
|
||||
import { useEffect, useLayoutEffect, useRef } from "preact/hooks";
|
||||
import { ComponentChildren } from "preact";
|
||||
import { Signal, useSignal } from "@preact/signals";
|
||||
import { IS_BROWSER } from "$fresh/runtime.ts";
|
||||
import { mapValues } from "$std/collections/map_values.ts";
|
||||
import { useAsync } from "../util/util.ts";
|
||||
import {
|
||||
Coperation,
|
||||
CorpSimple,
|
||||
fetchKosdaqList,
|
||||
fetchKospiList,
|
||||
fetchPageInfo,
|
||||
PageCorpsInfo,
|
||||
} from "../util/api.ts";
|
||||
|
||||
interface StockProps {
|
||||
pageName: string;
|
||||
}
|
||||
|
||||
interface ToggleButtonProps {
|
||||
disabled?: boolean;
|
||||
toggle: Signal<boolean>;
|
||||
children?: ComponentChildren;
|
||||
}
|
||||
|
||||
function ToggleButton(props: ToggleButtonProps) {
|
||||
const { disabled, toggle, ...rest } = props;
|
||||
return (
|
||||
<button
|
||||
{...rest}
|
||||
disabled={!IS_BROWSER || disabled}
|
||||
onClick={() => toggle.value = !toggle.value}
|
||||
class={"px-2 py-1 border-2 rounded transition-colors" + (
|
||||
toggle.value
|
||||
? "border-gray-500 bg-white hover:bg-gray-200"
|
||||
: "border-gray-200 bg-gray-800 hover:bg-gray-500 text-white"
|
||||
)}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
function StockListByDate(
|
||||
{ prevSet, rows, name }: {
|
||||
prevSet: Set<string>;
|
||||
rows: Coperation[];
|
||||
name: string;
|
||||
},
|
||||
) {
|
||||
const lastCount = useRef(rows.length);
|
||||
const curCount = rows.length;
|
||||
const parent = useRef<HTMLDivElement>(null);
|
||||
const controller = useRef<
|
||||
{
|
||||
isEnabled: () => boolean;
|
||||
disable: () => void;
|
||||
enable: () => void;
|
||||
} | undefined
|
||||
>();
|
||||
useEffect(() => {
|
||||
(async () => {
|
||||
console.log("animation mount on ", name);
|
||||
const { default: autoAnimate } = await import(
|
||||
"https://esm.sh/@formkit/auto-animate@0.7.0"
|
||||
);
|
||||
if (parent.current) {
|
||||
const cntr = autoAnimate(parent.current);
|
||||
controller.current = cntr;
|
||||
}
|
||||
})();
|
||||
}, [parent]);
|
||||
|
||||
useLayoutEffect(() => {
|
||||
if (controller.current) {
|
||||
if (Math.abs(curCount - lastCount.current) > 200) {
|
||||
console.log("disable animation", curCount, "from", lastCount.current);
|
||||
controller.current.disable();
|
||||
} else {
|
||||
console.log("enable animation", curCount, "from", lastCount.current);
|
||||
controller.current.enable();
|
||||
}
|
||||
lastCount.current = curCount;
|
||||
}
|
||||
}, [parent, rows]);
|
||||
|
||||
return (
|
||||
<div ref={parent}>
|
||||
<h2 class="text-lg">{name}</h2>
|
||||
{rows.map((row) => {
|
||||
const firstOccur = !prevSet.has(row.Code);
|
||||
return (
|
||||
<div
|
||||
key={row.Code}
|
||||
class={[
|
||||
"bg-white",
|
||||
firstOccur ? "text-[#ff5454] underline" : "text-black",
|
||||
].join(" ")}
|
||||
>
|
||||
<a href={`https://stockplus.com/m/stocks/KOREA-A${row.Code}`}>
|
||||
{row.Name}
|
||||
</a>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function StockList({ data }: { data: PageCorpsInfo }) {
|
||||
console.log("data");
|
||||
|
||||
const corpListByDate = data.corpListByDate;
|
||||
const keys = Object.keys(corpListByDate).sort().reverse().slice(0, 5)
|
||||
.reverse();
|
||||
const sets = keys.map((x) => new Set(corpListByDate[x].map((y) => y.Code)));
|
||||
//const rows = data.corpListbyDate;
|
||||
return (
|
||||
<div class="flex">
|
||||
{keys.map((x, i) => {
|
||||
const prevSet = i == 0 ? new Set<string>() : sets[i - 1];
|
||||
const rows = corpListByDate[x];
|
||||
return (
|
||||
<StockListByDate key={x} name={x} prevSet={prevSet} rows={rows} />
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
type FilterInfoOption = {
|
||||
list: {
|
||||
items: CorpSimple[];
|
||||
include: boolean;
|
||||
}[];
|
||||
otherwise: boolean;
|
||||
};
|
||||
|
||||
function filterInfo(info: Coperation[], filterList: FilterInfoOption) {
|
||||
const checkMap = new Map<string, boolean>();
|
||||
for (const l of filterList.list) {
|
||||
for (const i of l.items) {
|
||||
checkMap.set(i.Code, l.include);
|
||||
}
|
||||
}
|
||||
return info.filter((x) => {
|
||||
const v = checkMap.get(x.Code);
|
||||
if (v === undefined) {
|
||||
return filterList.otherwise;
|
||||
} else {
|
||||
return v;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export default function StockListUI(props: StockProps) {
|
||||
const sig = useAsync<[PageCorpsInfo, CorpSimple[], CorpSimple[]]>(() =>
|
||||
Promise.all([
|
||||
fetchPageInfo(props.pageName),
|
||||
fetchKospiList(),
|
||||
fetchKosdaqList(),
|
||||
])
|
||||
);
|
||||
const viewKospi = useSignal(true);
|
||||
const viewKosdaq = useSignal(false);
|
||||
const viewOtherwise = useSignal(false);
|
||||
return (
|
||||
<div class="my-2">
|
||||
<div class="flex gap-2">
|
||||
<ToggleButton toggle={viewKospi}>Kospi</ToggleButton>
|
||||
<ToggleButton toggle={viewKosdaq}>Kosdaq</ToggleButton>
|
||||
<ToggleButton toggle={viewOtherwise}>Otherwise</ToggleButton>
|
||||
</div>
|
||||
<div class="flex gap-8 py-6 flex-col">
|
||||
{sig.value.type == "loading"
|
||||
? (new Array(20).fill(0).map((_) => (
|
||||
<div class="animate-pulse bg-gray-300 p-2"></div>
|
||||
)))
|
||||
: (
|
||||
<div>
|
||||
{sig.value.type == "error"
|
||||
? (
|
||||
<div>
|
||||
<p>File Loading Failed</p>
|
||||
</div>
|
||||
)
|
||||
: (
|
||||
<StockList
|
||||
data={applyFilter(
|
||||
sig.value.data[0],
|
||||
sig.value.data[1],
|
||||
sig.value.data[2],
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
function applyFilter(
|
||||
data: PageCorpsInfo,
|
||||
kospi: CorpSimple[],
|
||||
kosdaq: CorpSimple[],
|
||||
): PageCorpsInfo {
|
||||
const filter = getFilters(kospi, kosdaq);
|
||||
return {
|
||||
name: data.name,
|
||||
description: data.description,
|
||||
corpListByDate: mapValues(data.corpListByDate, (it: Coperation[]) => {
|
||||
return filterInfo(it, filter);
|
||||
}),
|
||||
};
|
||||
}
|
||||
function getFilters(
|
||||
kospi: CorpSimple[],
|
||||
kosdaq: CorpSimple[],
|
||||
): FilterInfoOption {
|
||||
return {
|
||||
otherwise: viewOtherwise.value,
|
||||
list: [{
|
||||
include: viewKospi.value,
|
||||
items: kospi,
|
||||
}, {
|
||||
include: viewKosdaq.value,
|
||||
items: kosdaq,
|
||||
}],
|
||||
};
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
Load diff
24
main.ts
24
main.ts
|
@ -1,24 +0,0 @@
|
|||
/// <reference no-default-lib="true" />
|
||||
/// <reference lib="dom" />
|
||||
/// <reference lib="dom.iterable" />
|
||||
/// <reference lib="dom.asynciterable" />
|
||||
/// <reference lib="deno.ns" />
|
||||
|
||||
import "$std/dotenv/load.ts";
|
||||
|
||||
import { start } from "$fresh/server.ts";
|
||||
import manifest from "./fresh.gen.ts";
|
||||
|
||||
import twindPlugin from "$fresh/plugins/twindv1.ts";
|
||||
import twindConfig from "./twind.config.ts";
|
||||
|
||||
console.log("start");
|
||||
|
||||
Deno.addSignalListener("SIGINT", () => {
|
||||
Deno.exit(0);
|
||||
});
|
||||
|
||||
await start(manifest, {
|
||||
port: 12001,
|
||||
plugins: [twindPlugin(twindConfig)]
|
||||
});
|
58
meta.py
58
meta.py
|
@ -1,58 +0,0 @@
|
|||
import sqlite3
|
||||
import argparse
|
||||
from typing import Literal
|
||||
|
||||
KOSPI = "KOSPI"
|
||||
KOSDAQ = "KOSDAQ"
|
||||
|
||||
def queryAllCorpGroup(db: sqlite3.Connection, table: Literal["KOSPI", "KOSDAQ"]) -> list[tuple[str, str]]:
|
||||
assert table in [KOSPI, KOSDAQ]
|
||||
cursor = db.execute("SELECT Code, Name from "+ table)
|
||||
return [ (record[0],record[1]) for record in cursor]
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser()
|
||||
|
||||
parser.add_argument("--create", action="store_true", help="create db schema")
|
||||
parser.add_argument("--read", action="store", help="read csv and commit in db")
|
||||
parser.add_argument("--clean", action="store_true", help="clean db")
|
||||
parser.add_argument("--table-name", required=True, choices=["KOSPI", "KOSDAQ"])
|
||||
args = parser.parse_args()
|
||||
if args.create:
|
||||
db = sqlite3.connect("./stock.db")
|
||||
db.execute(f"""
|
||||
CREATE TABLE IF NOT EXISTS "{args.table_name}" (
|
||||
"Code" TEXT,
|
||||
"Name" TEXT,
|
||||
PRIMARY KEY("Code"),
|
||||
FOREIGN KEY("Code") REFERENCES "KRXCorp"("Code")
|
||||
)
|
||||
""")
|
||||
db.close()
|
||||
elif args.read:
|
||||
path = args.read
|
||||
import csv
|
||||
with open(path, "r", encoding='UTF-8') as fp:
|
||||
reader = csv.reader(fp)
|
||||
#next(reader) # skip header
|
||||
# header not exist
|
||||
# collect data
|
||||
data = [ (row[0], row[1]) for row in reader]
|
||||
|
||||
codeDict = { code:name for code, name in data}
|
||||
db = sqlite3.connect("./stock.db")
|
||||
dbData = {code:name for code, name in queryAllCorpGroup(db, args.table_name)}
|
||||
# remove common code
|
||||
commonCode = set(dbData.keys()) & set(codeDict.keys())
|
||||
for code in commonCode:
|
||||
del codeDict[code]
|
||||
del dbData[code]
|
||||
db.execute("BEGIN")
|
||||
# insert new code
|
||||
db.executemany(f"INSERT INTO {args.table_name} (Code, Name) VALUES (?, ?)", codeDict.items())
|
||||
# delete old code
|
||||
db.executemany(f"DELETE FROM {args.table_name} WHERE code = ?", dbData.keys())
|
||||
db.execute("COMMIT")
|
||||
db.commit()
|
||||
db.close()
|
44
notebook.py
44
notebook.py
|
@ -1,44 +0,0 @@
|
|||
# %%
|
||||
import matplotlib.pyplot as plt
|
||||
# %%
|
||||
import gen
|
||||
# %%
|
||||
dataStore = gen.DataStore()
|
||||
stock = dataStore.getStockPrice("108320",250)
|
||||
close = stock["CLOSE"]
|
||||
|
||||
d45 = close.loc[::-1].rolling(window=45).mean().dropna().loc[::-1]
|
||||
rsi = gen.calc_rsi(close.loc[::-1],14).dropna().loc[::-1]
|
||||
rsi.reset_index(drop = True, inplace = True)
|
||||
# %%
|
||||
|
||||
|
||||
# %%
|
||||
|
||||
krxCrops = dataStore.getAllKRXCorp()
|
||||
krxCrops
|
||||
# %%
|
||||
krxCrops[0]
|
||||
# %%
|
||||
# %%
|
||||
#%%time
|
||||
lst = []
|
||||
for entry in krxCrops:
|
||||
data = dataStore.getStockPrice(entry.Code, 120)
|
||||
lst.append(data)
|
||||
print("a")
|
||||
# %%
|
||||
lst
|
||||
# %%
|
||||
#%%time
|
||||
|
||||
lst_mean = [ s["CLOSE"].mean() for s in lst]
|
||||
|
||||
# %%
|
||||
len(krxCrops)
|
||||
# %%
|
||||
|
||||
RANGE = 50
|
||||
plt.plot(d45.iloc[:RANGE].loc[::-1])
|
||||
plt.plot(close.iloc[:RANGE].loc[::-1])
|
||||
plt.show()
|
41
pages.py
41
pages.py
|
@ -1,7 +1,40 @@
|
|||
import yaml
|
||||
|
||||
name = "name"
|
||||
desc = "description"
|
||||
|
||||
with open("pages.yaml", "r", encoding='utf-8') as fp:
|
||||
GenLists = yaml.safe_load(fp)
|
||||
GenLists = [{name:"cross 2", desc:"""\
|
||||
5일선과 20일선이 서로 만나는 시점 즉 상대 오차가 1% 이하이고
|
||||
5일선과 60일선이 서로 만나는 시점을 찾습니다.
|
||||
"""},
|
||||
{name:"cross 3",desc: """\
|
||||
cross 2의 조건에서 더해서 거래량이 이전 날짜보다 3배 증가하고
|
||||
100000 이상인 시점을 찾습니다.
|
||||
"""},
|
||||
{name:"cross 4",desc: """\
|
||||
20일선과 60일선이 서로 만나는 시점 즉 상대 오차가 1% 이하이고
|
||||
거래량이 1000000 이상인 시점을 찾습니다.
|
||||
"""},
|
||||
{name:"d20d5",desc: """\
|
||||
5일선이 20선보다 큰 시점을 찾습니다.
|
||||
"""},
|
||||
{name:"d20d5VolumeX5",desc: """\
|
||||
d20d5의 조건에서 더해서 거래량이 이전 날짜보다 5배 증가한 시점을 찾습니다.
|
||||
"""},
|
||||
{name:"DiffDistance",desc: """\
|
||||
5일선과 20일선이 서로 만나는 시점 즉 상대 오차가 3% 이하이고
|
||||
5일선과 60일선이 서로 만나고 거래량이 이전 날짜보다 3배 증가한
|
||||
시점을 찾습니다.
|
||||
"""},
|
||||
{name:"volume",desc: """\
|
||||
거래량이 이전 날짜보다 3배 증가한 시점을 찾습니다.
|
||||
"""},
|
||||
{name:"volume5",desc: """\
|
||||
거래량과 가격의 곱이 50,000,000,000 이상인 시점을 찾습니다.
|
||||
"""},
|
||||
{name:"volumeX5",desc: """\
|
||||
거래량이 이전 날짜보다 5배 증가한 시점을 찾습니다.
|
||||
"""},
|
||||
{name:"macd",desc: """\
|
||||
signal과 macd가 서로 교차한 시점을 찾습니다. 즉 signal이 아래로 떨어지고
|
||||
macd가 위로 올라가는 시점을 찾습니다.
|
||||
"""},
|
||||
]
|
53
pages.ts
53
pages.ts
|
@ -1,53 +0,0 @@
|
|||
import { parse } from "https://deno.land/std@0.195.0/yaml/mod.ts";
|
||||
import { join, fromFileUrl } from "https://deno.land/std@0.193.0/path/mod.ts";
|
||||
|
||||
export const PAGES_PATH = join(fromFileUrl(import.meta.url), "../pages.yaml");
|
||||
|
||||
export interface PageDescription {
|
||||
name: string;
|
||||
description: string;
|
||||
}
|
||||
|
||||
async function readPagesDescription() {
|
||||
const pagesText = await Deno.readTextFile(PAGES_PATH);
|
||||
const pages = parse(pagesText) as PageDescription[];
|
||||
return pages;
|
||||
}
|
||||
|
||||
function watchFile(
|
||||
path: string,
|
||||
callback: () => void | Promise<void>,
|
||||
) {
|
||||
const watcherRef = Deno.watchFs(path);
|
||||
(async () => {
|
||||
for await (const event of watcherRef) {
|
||||
if (event.kind == "modify") {
|
||||
await callback();
|
||||
}
|
||||
}
|
||||
})();
|
||||
const closeHandler = () => {
|
||||
watcherRef.close();
|
||||
};
|
||||
Deno.addSignalListener("SIGINT", closeHandler);
|
||||
return ()=>{
|
||||
Deno.removeSignalListener("SIGINT", closeHandler);
|
||||
closeHandler();
|
||||
}
|
||||
}
|
||||
|
||||
let pages_meta: PageDescription[] = [];
|
||||
let mtime = 0;
|
||||
let lastest_disposer = () => {};
|
||||
export async function get_pages_meta(): Promise<[PageDescription[],number]>{
|
||||
if (pages_meta.length == 0) {
|
||||
pages_meta = await readPagesDescription();
|
||||
mtime = Date.now();
|
||||
lastest_disposer();
|
||||
lastest_disposer = watchFile(PAGES_PATH, async () => {
|
||||
pages_meta = await readPagesDescription();
|
||||
mtime = Date.now();
|
||||
});
|
||||
}
|
||||
return [pages_meta, mtime];
|
||||
}
|
76
pages.yaml
76
pages.yaml
|
@ -1,76 +0,0 @@
|
|||
- name: cross d20 and d5
|
||||
description: 5일선이 20일 선과 교차한 시점을 찾습니다.
|
||||
- name: 정배열60
|
||||
description: 60일선이 어제보다 오늘이 더 큼
|
||||
- name: 정배열20
|
||||
description: 60일선이 어제보다 오늘이 더 크고 20일선 증가
|
||||
- name: 정배열240
|
||||
description: '60일선이 어제보다 오늘이 더 크고 20일선도 증가, 그리고 240일 선도 증가'
|
||||
- name: 모두 정배열
|
||||
description: |
|
||||
5일, 20일, 60일, 120일, 240일 모두 증가
|
||||
- name: 20일선 반등
|
||||
description: 20일선 반등
|
||||
- name: 120일선 반등
|
||||
description: 120일선 반등
|
||||
- name: 240일선 반등
|
||||
description: 240일선 반등
|
||||
- name: d20d5
|
||||
description: |
|
||||
5일선이 20선보다 큰 시점을 찾습니다.
|
||||
- name: d20d5VolumeX5
|
||||
description: |
|
||||
d20d5의 조건에서 더해서 거래량이 이전 날짜보다 5배 증가한 시점을 찾습니다.
|
||||
- name: volume
|
||||
description: |
|
||||
거래량이 이전 날짜보다 3배 증가한 시점을 찾습니다.
|
||||
- name: volume5
|
||||
description: |
|
||||
거래량과 가격의 곱이 50,000,000,000 이상인 시점을 찾습니다.
|
||||
- name: volumeX5
|
||||
description: |
|
||||
거래량이 이전 날짜보다 5배 증가한 시점을 찾습니다.
|
||||
- name: macd
|
||||
description: |
|
||||
signal과 macd가 서로 교차한 시점을 찾습니다. 즉 signal이 올라가고
|
||||
macd가 아래로 내려가는 시점을 찾습니다. macd 는 5일선과 10일선으로 이루어지고
|
||||
시그널을 구하기 위한 이동 평균은 4일입니다.
|
||||
- name: 뭉침
|
||||
description: d5, d20, d45 만난것 종가 5% 이내
|
||||
- name: 뭉침01
|
||||
description: d5, d20, d45 만난것 종가 1% 이내
|
||||
- name: 뭉침03
|
||||
description: d5, d20, d45 만난것 종가 3% 이내
|
||||
|
||||
- name: 45일선 반등
|
||||
description: 45일 선반등
|
||||
|
||||
- name: 뭉침5% 120선 상승
|
||||
description: 뭉침5% 120선 상승
|
||||
- name: 뭉침3% 120선 상승
|
||||
description: 뭉침3% 120선 상승
|
||||
- name: 뭉침1% 120선 상승
|
||||
description: 뭉침1% 120선 상승
|
||||
|
||||
- name: 60일 5일선 반등
|
||||
description: 60일선이 상향, 5일 반등
|
||||
- name: RSI 30 이하
|
||||
description: RSI 14일 이 30이하
|
||||
- name: d20d5돌파
|
||||
description: 이전날에는 20일선이 위에 있다가 5일선이 더 커졌을 때
|
||||
- name: 240일 증가
|
||||
description: 240일선이 증가하는 것.
|
||||
- name: 볼린저 밴드 25
|
||||
description: '볼린저 밴드(25일선 ,표준편차 2배)의 위 밴드 값을 넘었을 때 표시.'
|
||||
- name: 양봉사이20일선
|
||||
description: Open과 Close 사이 20일 선
|
||||
- name: 양봉사이240일선증가
|
||||
description: Open과 Close 사이 240일 선. 240일 선 증가
|
||||
- name: 떠있음
|
||||
description: |
|
||||
양봉, 음봉이 20일선, 60일선, 120선보다 떠있으면
|
||||
- name: 5일선반등120선증가
|
||||
description: 5일선이 반등 120 선 증가
|
||||
- name: 120선240선추월
|
||||
description: |
|
||||
120선이 상승해서 240일 선을 뚫을때
|
BIN
requirements.txt
BIN
requirements.txt
Binary file not shown.
|
@ -1,28 +0,0 @@
|
|||
|
||||
import { Head } from "$fresh/runtime.ts";
|
||||
|
||||
export default function Error404() {
|
||||
return (
|
||||
<>
|
||||
<Head>
|
||||
<title>404 - Page not found</title>
|
||||
</Head>
|
||||
<div class="px-4 py-8 mx-auto bg-[#86efac]">
|
||||
<div class="max-w-screen-md mx-auto flex flex-col items-center justify-center">
|
||||
<img
|
||||
class="my-6"
|
||||
src="/logo.svg"
|
||||
width="128"
|
||||
height="128"
|
||||
alt="the fresh logo: a sliced lemon dripping with juice"
|
||||
/>
|
||||
<h1 class="text-4xl font-bold">404 - Page not found</h1>
|
||||
<p class="my-4">
|
||||
The page you were looking for doesn't exist.
|
||||
</p>
|
||||
<a href="/" class="underline">Go back home</a>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
|
@ -1,9 +0,0 @@
|
|||
import { AppProps } from "$fresh/server.ts";
|
||||
|
||||
export default function App({ Component }: AppProps) {
|
||||
return (
|
||||
<>
|
||||
<Component />
|
||||
</>
|
||||
);
|
||||
}
|
|
@ -1,35 +0,0 @@
|
|||
import { Handlers } from "$fresh/server.ts";
|
||||
import { db } from "../../../db/db.ts";
|
||||
import { jsonArrayFrom } from "kysely/helpers/sqlite";
|
||||
|
||||
export const handler: Handlers = {
|
||||
async GET(req, ctx): Promise<Response> {
|
||||
const headers = new Headers({
|
||||
"content-type": "application/json"
|
||||
});
|
||||
const index = ctx.params.index;
|
||||
const corp = await db.selectFrom("KRXCorp")
|
||||
.selectAll([
|
||||
"KRXCorp"
|
||||
])
|
||||
.select(eb=> [
|
||||
jsonArrayFrom(eb.selectFrom("stock")
|
||||
.select([
|
||||
"stock.Close",
|
||||
"stock.Open",
|
||||
"stock.Low",
|
||||
"stock.High",
|
||||
"stock.Date",
|
||||
"stock.Volume",
|
||||
])
|
||||
.where("Code", "=", index)
|
||||
.orderBy("Date", "desc")
|
||||
.limit(100)
|
||||
).as("prices")]
|
||||
)
|
||||
.where("Code", "=", index)
|
||||
.executeTakeFirst();
|
||||
|
||||
return new Response(JSON.stringify(corp ?? null), {headers});
|
||||
},
|
||||
}
|
|
@ -1,21 +0,0 @@
|
|||
import { Handlers } from "$fresh/server.ts";
|
||||
import { db } from "../../../db/db.ts";
|
||||
|
||||
export const handler: Handlers = {
|
||||
async GET(req, _ctx): Promise<Response> {
|
||||
const headers = new Headers({
|
||||
"content-type": "application/json"
|
||||
});
|
||||
const url = new URL(req.url);
|
||||
const q = url.searchParams.get("q");
|
||||
const name = url.searchParams.get("name");
|
||||
const corps = await db.selectFrom("KRXCorp")
|
||||
.selectAll([
|
||||
"KRXCorp"
|
||||
])
|
||||
.$if(!!q, qb=> qb.where("Name", "like", "%"+q+"%"))
|
||||
.$if(!!name, qb => qb.where("Name", "=", name))
|
||||
.execute();
|
||||
return new Response(JSON.stringify(corps), {headers});
|
||||
},
|
||||
}
|
|
@ -1,21 +0,0 @@
|
|||
import { HandlerContext } from "$fresh/server.ts";
|
||||
|
||||
// Jokes courtesy of https://punsandoneliners.com/randomness/programmer-jokes/
|
||||
const JOKES = [
|
||||
"Why do Java developers often wear glasses? They can't C#.",
|
||||
"A SQL query walks into a bar, goes up to two tables and says “can I join you?”",
|
||||
"Wasn't hard to crack Forrest Gump's password. 1forrest1.",
|
||||
"I love pressing the F5 key. It's refreshing.",
|
||||
"Called IT support and a chap from Australia came to fix my network connection. I asked “Do you come from a LAN down under?”",
|
||||
"There are 10 types of people in the world. Those who understand binary and those who don't.",
|
||||
"Why are assembly programmers often wet? They work below C level.",
|
||||
"My favourite computer based band is the Black IPs.",
|
||||
"What programme do you use to predict the music tastes of former US presidential candidates? An Al Gore Rhythm.",
|
||||
"An SEO expert walked into a bar, pub, inn, tavern, hostelry, public house.",
|
||||
];
|
||||
|
||||
export const handler = (_req: Request, _ctx: HandlerContext): Response => {
|
||||
const randomIndex = Math.floor(Math.random() * JOKES.length);
|
||||
const body = JOKES[randomIndex];
|
||||
return new Response(body);
|
||||
};
|
|
@ -1,17 +0,0 @@
|
|||
import { Handlers } from "$fresh/server.ts";
|
||||
import { db } from "../../db/db.ts";
|
||||
|
||||
export const handler: Handlers = {
|
||||
async GET(req, _ctx): Promise<Response> {
|
||||
const headers = new Headers({
|
||||
"content-type": "application/json"
|
||||
});
|
||||
const rows = await db.selectFrom("KOSDAQ")
|
||||
.select([
|
||||
"Code",
|
||||
"Name"
|
||||
])
|
||||
.execute();
|
||||
return new Response(JSON.stringify(rows), {headers});
|
||||
},
|
||||
}
|
|
@ -1,17 +0,0 @@
|
|||
import { Handlers } from "$fresh/server.ts";
|
||||
import { db } from "../../db/db.ts";
|
||||
|
||||
export const handler: Handlers = {
|
||||
async GET(req, _ctx): Promise<Response> {
|
||||
const headers = new Headers({
|
||||
"content-type": "application/json"
|
||||
});
|
||||
const rows = await db.selectFrom("KOSPI")
|
||||
.select([
|
||||
"Code",
|
||||
"Name"
|
||||
])
|
||||
.execute();
|
||||
return new Response(JSON.stringify(rows), {headers});
|
||||
},
|
||||
}
|
|
@ -1,42 +0,0 @@
|
|||
import { Handlers } from "$fresh/server.ts";
|
||||
import { Status, STATUS_TEXT } from "https://deno.land/std@0.195.0/http/mod.ts";
|
||||
import { fromFileUrl, join } from "$std/path/mod.ts";
|
||||
|
||||
|
||||
export const handler: Handlers = {
|
||||
async GET(req, ctx): Promise<Response> {
|
||||
const headers = new Headers({
|
||||
"content-type": "application/json"
|
||||
});
|
||||
const path = join(fromFileUrl(import.meta.url), "../../../../dist", `${ctx.params.name}.json`);
|
||||
console.log("path : ",path)
|
||||
let stat;
|
||||
try {
|
||||
stat = await Deno.stat(path);
|
||||
}
|
||||
catch(err){
|
||||
if(err instanceof Deno.errors.NotFound){
|
||||
return await ctx.renderNotFound();
|
||||
}
|
||||
else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
const mtime = stat.mtime ?? new Date(0);
|
||||
const body = await Deno.readTextFile(path);
|
||||
headers.set("last-modified", mtime.toUTCString());
|
||||
console.log(mtime);
|
||||
// headers.set("cache-control", "max-age=600");
|
||||
|
||||
// const ifModifiedSinceValue = req.headers.get("if-modified-since");
|
||||
// if ( ifModifiedSinceValue &&
|
||||
// mtime.getTime() <= new Date(ifModifiedSinceValue).getTime()
|
||||
// ){
|
||||
// return new Response(null, {
|
||||
// status: Status.NotModified,
|
||||
// statusText: STATUS_TEXT[Status.NotModified]
|
||||
// })
|
||||
// }
|
||||
return new Response(body, {headers});
|
||||
},
|
||||
};
|
|
@ -1,24 +0,0 @@
|
|||
import { Handlers } from "$fresh/server.ts";
|
||||
import { get_pages_meta } from "../../../pages.ts";
|
||||
import { Status, STATUS_TEXT } from "https://deno.land/std@0.195.0/http/mod.ts";
|
||||
|
||||
export const handler: Handlers = {
|
||||
async GET(req, _ctx): Promise<Response> {
|
||||
const headers = new Headers({
|
||||
"content-type": "application/json"
|
||||
});
|
||||
const [body, mtime] = await get_pages_meta();
|
||||
headers.set("last-modified", new Date(mtime).toUTCString());
|
||||
console.log("aaa");
|
||||
const ifModifiedSinceValue = req.headers.get("if-modified-since");
|
||||
if ( ifModifiedSinceValue &&
|
||||
mtime != new Date(ifModifiedSinceValue).getTime()
|
||||
){
|
||||
return new Response(null, {
|
||||
status: Status.NotModified,
|
||||
statusText: STATUS_TEXT[Status.NotModified]
|
||||
})
|
||||
}
|
||||
return new Response(JSON.stringify(body), {headers});
|
||||
},
|
||||
};
|
|
@ -1,5 +0,0 @@
|
|||
import { PageProps } from "$fresh/server.ts";
|
||||
|
||||
export default function Greet(props: PageProps) {
|
||||
return <div>Hello {props.params.name}</div>;
|
||||
}
|
|
@ -1,46 +0,0 @@
|
|||
import { Head } from "$fresh/runtime.ts";
|
||||
import { get_pages_meta, PageDescription } from "../pages.ts";
|
||||
import { Handlers, PageProps } from "$fresh/server.ts";
|
||||
|
||||
export const handler: Handlers = {
|
||||
async GET(_req, ctx) {
|
||||
const [pages, _] = await get_pages_meta();
|
||||
return await ctx.render(pages);
|
||||
},
|
||||
};
|
||||
|
||||
export default function Home({ data }: PageProps<PageDescription[]>) {
|
||||
return (
|
||||
<>
|
||||
<Head>
|
||||
<title>stock-front</title>
|
||||
</Head>
|
||||
<div class="px-4 py-8 mx-auto bg-[#86efac] min-h-screen">
|
||||
<div class="max-w-screen-md mx-auto flex flex-col items-center justify-center">
|
||||
<img
|
||||
class="my-6"
|
||||
src="/logo.svg"
|
||||
width="128"
|
||||
height="128"
|
||||
alt="the fresh logo: a sliced lemon dripping with juice"
|
||||
/>
|
||||
<h1 class="text-4xl font-bold">Stock</h1>
|
||||
<div class="my-4">
|
||||
<ul>
|
||||
{data.map((x) => (
|
||||
<li class="my-2">
|
||||
<a
|
||||
class="p-2 block hover:bg-gray-300 bg-white rounded"
|
||||
href={`/pages/${encodeURIComponent(x.name)}`}
|
||||
>
|
||||
{x.name}
|
||||
</a>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
|
@ -1,38 +0,0 @@
|
|||
import { PageProps, Handlers } from "$fresh/server.ts";
|
||||
import { Head } from "$fresh/runtime.ts";
|
||||
import { get_pages_meta, PageDescription } from "../../pages.ts";
|
||||
import StockList from "../../islands/StockList.tsx";
|
||||
|
||||
export const handler: Handlers = {
|
||||
async GET(_req, ctx) {
|
||||
const [pages, _] = await get_pages_meta();
|
||||
const name = ctx.params.name;
|
||||
const page = pages.filter(x=> x.name === name);
|
||||
if (page.length === 0) {
|
||||
return await ctx.renderNotFound();
|
||||
}
|
||||
return await ctx.render(page[0]);
|
||||
},
|
||||
};
|
||||
|
||||
export default function Pages(props: PageProps<PageDescription>) {
|
||||
return <>
|
||||
<Head>
|
||||
<title>Stock: {props.params.name}</title>
|
||||
</Head>
|
||||
<div class="px-4 py-8 mx-auto bg-[#86efac] min-h-screen">
|
||||
<div class="max-w-screen-md mx-auto flex flex-col items-center justify-center">
|
||||
<img
|
||||
class="my-6"
|
||||
src="/stockgraph.svg"
|
||||
width="128"
|
||||
height="128"
|
||||
alt="stock graph"
|
||||
/>
|
||||
<h1 class="text-4xl">{props.params.name}</h1>
|
||||
<p>{props.data.description}</p>
|
||||
<StockList pageName={props.params.name}></StockList>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
}
|
Binary file not shown.
Before Width: | Height: | Size: 22 KiB |
@ -1,150 +0,0 @@
000250,삼천당제약
003380,하림지주
005290,동진쎄미켐
006730,서부T&D
007390,네이처셀
015750,성우하이텍
018000,유니슨
022100,포스코 ICT
023410,유진기업
025900,동화기업
025980,아난티
027360,아주IB투자
028150,GS홈쇼핑
028300,에이치엘비
029960,코엔텍
030190,NICE평가정보
030530,원익홀딩스
031390,녹십자셀
032190,다우데이타
032500,케이엠더블유
033290,코웰패션
033640,네패스
034230,파라다이스
035600,KG이니시스
035760,CJ ENM
035900,JYP Ent.
036490,SK머티리얼즈
036540,SFA반도체
036810,에프에스티
036830,솔브레인홀딩스
036930,주성엔지니어링
038500,삼표시멘트
038540,상상인
039030,이오테크닉스
039200,오스코텍
041190,우리기술투자
041510,에스엠
041960,코미팜
042000,카페24
044340,위닉스
045390,대아티아이
046890,서울반도체
048260,오스템임플란트
048410,현대바이오
048530,인트론바이오
053030,바이넥스
053800,안랩
056190,에스에프에이
058470,리노공업
058820,CMG제약
060150,인선이엔티
060250,NHN한국사이버결제
060720,KH바텍
061970,엘비세미콘
064760,티씨케이
066970,엘앤에프
067160,아프리카TV
067630,에이치엘비생명과학
068240,다원시스
068760,셀트리온제약
069080,웹젠
078020,이베스트투자증권
078070,유비쿼스홀딩스
078130,국일제지
078160,메디포스트
078340,컴투스
078600,대주전자재료
082270,젬백스
083790,크리스탈지노믹스
084370,유진테크
084850,아이티엠반도체
084990,헬릭스미스
085660,차바이오텍
086450,동국제약
086520,에코프로
086900,메디톡스
088800,에이스테크
089980,상아프론테크
090460,비에이치
091700,파트론
091990,셀트리온헬스케어
092040,아미코젠
092190,서울바이오시스
095610,테스
095700,제넥신
096530,씨젠
097520,엠씨넥스
098460,고영
100090,삼강엠앤티
100130,동국S&C
101490,에스앤에스텍
102710,이엔에프테크놀로지
104830,원익머트리얼즈
108230,톱텍
108320,실리콘웍스
112040,위메이드
115450,지트리비앤티
119860,다나와
122870,와이지엔터테인먼트
122990,와이솔
131290,티에스이
131370,알서포트
131970,테스나
137400,피엔티
138080,오이솔루션
140410,메지온
140860,파크시스템스
141080,레고켐바이오
144510,녹십자랩셀
145020,휴젤
166090,하나머티리얼즈
175250,아이큐어
178320,서진시스템
178920,PI첨단소재
182400,엔케이맥스
183490,엔지켐생명과학
192440,슈피겐코리아
194480,데브시스터즈
196170,알테오젠
200130,콜마비앤에이치
200230,텔콘RF제약
204270,제이앤티씨
213420,덕산네오룩스
214150,클래시스
214450,파마리서치
215200,메가스터디교육
218410,RFHIC
222080,씨아이에스
222800,심텍
230360,에코마케팅
235980,메드팩토
237690,에스티팜
240810,원익IPS
243070,휴온스
247540,에코프로비엠
253450,스튜디오드래곤
263050,유틸렉스
263750,펄어비스
265520,AP시스템
267980,매일유업
268600,셀리버리
272290,이녹스첨단소재
278280,천보
290510,코리아센터
290650,엘앤씨바이오
293490,카카오게임즈
294140,레몬
298380,에이비엘바이오
323990,박셀바이오
357780,솔브레인
200
static/kospi.csv
@ -1,200 +0,0 @@
000070,삼양홀딩스
000080,하이트진로
000100,유한양행
000120,CJ대한통운
000150,두산
000210,DL
000240,한국앤컴퍼니
000270,기아
000660,SK하이닉스
000670,영풍
000720,현대건설
000810,삼성화재
000880,한화
000990,DB하이텍
001040,CJ
001060,JW중외제약
001120,LG상사
001230,동국제강
001440,대한전선
001450,현대해상
001680,대상
001740,SK네트웍스
001800,오리온홀딩스
002350,넥센타이어
002380,KCC
002790,아모레G
003000,부광약품
003090,대웅
003230,삼양식품
003240,태광산업
003410,쌍용C&E
003490,대한항공
003520,영진약품
003550,LG
003670,포스코케미칼
003850,보령제약
004000,롯데정밀화학
004020,현대제철
004170,신세계
004370,농심
004490,세방전지
004800,효성
004990,롯데지주
005250,녹십자홀딩스
005300,롯데칠성
005380,현대차
005440,현대그린푸드
005490,POSCO
005830,DB손해보험
005930,삼성전자
005940,NH투자증권
006040,동원산업
006120,SK디스커버리
006260,LS
006280,녹십자
006360,GS건설
006400,삼성SDI
006650,대한유화
006800,미래에셋증권
007070,GS리테일
007310,오뚜기
007570,일양약품
007700,F&F 홀딩스
008560,메리츠증권
008770,호텔신라
008930,한미사이언스
009150,삼성전기
009240,한샘
009420,한올바이오파마
009540,한국조선해양
009830,한화솔루션
010060,OCI
010120,LS ELECTRIC
010130,고려아연
010140,삼성중공업
010620,현대미포조선
010780,아이에스동서
010950,S-Oil
011070,LG이노텍
011170,롯데케미칼
011200,HMM
011210,현대위아
011780,금호석유
011790,SKC
012330,현대모비스
012450,한화에어로스페이스
012630,HDC
012750,에스원
013890,지누스
014680,한솔케미칼
014820,동원시스템즈
015760,한국전력
016360,삼성증권
016380,KG동부제철
017670,SK텔레콤
017800,현대엘리베이
018260,삼성에스디에스
018880,한온시스템
019170,신풍제약
020000,한섬
020150,일진머티리얼즈
020560,아시아나항공
021240,코웨이
023530,롯데쇼핑
024110,기업은행
026960,동서
028050,삼성엔지니어링
028260,삼성물산
028670,팬오션
029780,삼성카드
030000,제일기획
030200,KT
031430,신세계인터내셔날
032350,롯데관광개발
032640,LG유플러스
032830,삼성생명
033780,KT&G
034020,두산중공업
034220,LG디스플레이
034730,SK
035250,강원랜드
035420,NAVER
035720,카카오
036460,한국가스공사
036570,엔씨소프트
039490,키움증권
042660,대우조선해양
042670,두산인프라코어
047040,대우건설
047050,포스코인터내셔널
047810,한국항공우주
049770,동원F&B
051600,한전KPS
051900,LG생활건강
051910,LG화학
052690,한전기술
055550,신한지주
057050,현대홈쇼핑
064350,현대로템
064960,SNT모티브
066570,LG전자
068270,셀트리온
069260,휴켐스
069620,대웅제약
069960,현대백화점
071050,한국금융지주
071840,롯데하이마트
073240,금호타이어
078930,GS
079160,CJ CGV
079550,LIG넥스원
081660,휠라홀딩스
086280,현대글로비스
086790,하나금융지주
088350,한화생명
090430,아모레퍼시픽
093370,후성
096770,SK이노베이션
097950,CJ제일제당
103140,풍산
105560,KB금융
105630,한세실업
108670,LG하우시스
111770,영원무역
112610,씨에스윈드
114090,GKL
115390,락앤락
120110,코오롱인더
128940,한미약품
138930,BNK금융지주
139480,이마트
161390,한국타이어앤테크놀로지
161890,한국콜마
180640,한진칼
185750,종근당
192080,더블유게임즈
192400,쿠쿠홀딩스
192820,코스맥스
204320,만도
207940,삼성바이오로직스
214320,이노션
241560,두산밥캣
241590,화승엔터프라이즈
251270,넷마블
267250,현대중공업지주
271560,오리온
272210,한화시스템
282330,BGF리테일
284740,쿠쿠홈시스
285130,SK케미칼
294870,HDC현대산업개발
298020,효성티앤씨
298050,효성첨단소재
302440,SK바이오사이언스
316140,우리금융지주
326030,SK바이오팜
336260,두산퓨얼셀
352820,하이브
375500,DL이앤씨
383800,LX홀딩스
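Both listing files are plain two-column CSVs with no header row: a zero-padded six-digit ticker code followed by the company name. A minimal loading sketch with pandas, assuming the KOSPI file lives at static/kospi.csv as above and the KOSDAQ file sits beside it (its exact path is not shown in this diff); dtype=str keeps the leading zeros that an integer parse would drop.

import pandas as pd

def load_listing(path: str) -> pd.DataFrame:
    # No header row: column 0 is the six-digit code, column 1 the company name.
    # dtype=str preserves leading zeros such as "000250".
    return pd.read_csv(path, header=None, names=["Code", "Name"], dtype=str)

kospi = load_listing("static/kospi.csv")
kosdaq = load_listing("static/kosdaq.csv")   # assumed path, not confirmed by this diff
print(len(kospi), len(kosdaq))               # expected 200 and 150 for the files removed here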
@ -1,6 +0,0 @@
<svg width="40" height="40" fill="none" xmlns="http://www.w3.org/2000/svg">
  <path d="M34.092 8.845C38.929 20.652 34.092 27 30 30.5c1 3.5-2.986 4.222-4.5 2.5-4.457 1.537-13.512 1.487-20-5C2 24.5 4.73 16.714 14 11.5c8-4.5 16-7 20.092-2.655Z" fill="#FFDB1E"/>
  <path d="M14 11.5c6.848-4.497 15.025-6.38 18.368-3.47C37.5 12.5 21.5 22.612 15.5 25c-6.5 2.587-3 8.5-6.5 8.5-3 0-2.5-4-5.183-7.75C2.232 23.535 6.16 16.648 14 11.5Z" fill="#fff" stroke="#FFDB1E"/>
  <path d="M28.535 8.772c4.645 1.25-.365 5.695-4.303 8.536-3.732 2.692-6.606 4.21-7.923 4.83-.366.173-1.617-2.252-1.617-1 0 .417-.7 2.238-.934 2.326-1.365.512-4.223 1.29-5.835 1.29-3.491 0-1.923-4.754 3.014-9.122.892-.789 1.478-.645 2.283-.645-.537-.773-.534-.917.403-1.546C17.79 10.64 23 8.77 25.212 8.42c.366.014.82.35.82.629.41-.14 2.095-.388 2.503-.278Z" fill="#FFE600"/>
  <path d="M14.297 16.49c.985-.747 1.644-1.01 2.099-2.526.566.121.841-.08 1.29-.701.324.466 1.657.608 2.453.701-.715.451-1.057.852-1.452 2.106-1.464-.611-3.167-.302-4.39.42Z" fill="#fff"/>
</svg>
Before Width: | Height: | Size: 1 KiB |
@ -1,13 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
// 16pxls (c) by Paul mackenzie <paul@whatspauldoing.com>
//
// 16pxls is licensed under a
// Creative Commons Attribution-ShareAlike 4.0 International License.
//
// You should have received a copy of the license along with this
// work. If not, see <http://creativecommons.org/licenses/by-sa/4.0/>.
-->

<svg fill="#000000" width="800px" height="800px" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg">
  <path d="M0 14h16v2H0v-2zm8.5-8l4-4H11V0h5v5h-2V3.5L9.5 8l-1 1-2-2-5 5L0 10.5 6.5 4 8 5.5l.5.5z" fill-rule="evenodd"/>
</svg>
Before Width: | Height: | Size: 590 B |
@ -7,8 +7,10 @@ import sqlite3
import datetime

def get_naver_finance_price(code,page=1):
    #url = (f'https://finance.naver.com/item/sise_day.nhn?code={code}&page={page}')
    url = 'https://finance.naver.com/item/sise_day.nhn'
    headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36"}
    # print(url)
    html = requests.get(url,params={'code':code,'page':page},headers=headers)
    if html.status_code != 200:
        raise UserWarning(html.status_code)
@ -50,6 +52,7 @@ def croll_naver_page(code,page,date):

def croll_naver_page_all(code,date) -> pd.DataFrame:
    html_text = get_naver_finance_price(code)
    #print(html_text)
    s = bs4.BeautifulSoup(html_text,'html.parser')
    last = int(get_last_page(s))
    r = [(code,i) for i in range(1,last+1)]
@ -63,6 +66,9 @@ def croll_naver_page_all(code,date) -> pd.DataFrame:
    if len(retdata) == 0:
        return []
    return pd.concat(retdata,ignore_index=True)
    #with mp.Pool(CPU_COUNT) as pl:
    #    dl = pl.starmap(croll_naver_page,r)
    #    return pd.concat(dl,ignore_index=True)

def toSqlPos(x,code):
    return (code,x["날짜"],x["종가"],x["전일비"],x["시가"],x["고가"],x["저가"],x["거래량"])
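The early return added to croll_naver_page_all handles the case where nothing was scraped, because pandas refuses to concatenate an empty list. A minimal sketch of that failure mode and the guard, separate from the scraper itself:

import pandas as pd

def concat_pages(frames):
    # pd.concat([]) raises ValueError("No objects to concatenate"),
    # so an empty scrape has to be handled before concatenation.
    if len(frames) == 0:
        return []  # mirrors the guard in croll_naver_page_all
    return pd.concat(frames, ignore_index=True)

print(concat_pages([]))                                          # -> []
print(concat_pages([pd.DataFrame({"종가": [100, 101]})]).shape)  # -> (2, 1)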
@ -1,7 +0,0 @@
export default {
  content: ["./**/*.{html,tsx}"],
  theme:{
    extend:{},
  },
  plugins: [],
}
59
templates/Lists.html
Normal file
@ -0,0 +1,59 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Stock</title>
    <style>
        body{
            font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
            background: linear-gradient(to right, #2b2b2b, #3d1844);
            color: #fff;
        }
        .table_item:nth-child(2n){
            background: #a7a7a7;
        }
        .table_item:nth-child(2n+1){
            background: #fff;
        }
        .table_item:hover{
            background: #8d8d8d;
        }
        .container{
            display: grid;
            grid-template-rows: 24px auto;
            background: #f0f0f0;
            color: black;
            box-shadow: 0px 0px 5px 0px white;
            text-decoration: none;
            grid-template-columns: repeat({{ 5 }}, 1fr);
        }
        .container a:link, a:visited{
            text-decoration: none;
            color: black;
        }
        .data_header{
            border-bottom: 1px solid #a7a7a7;
        }
    </style>
</head>
<body>
    <div style="margin: auto; max-width: 750px;">
        <h1>{{title}} Stock List</h1>
        <section class="description">
            {{collected.description}}
        </section>
        <section class="container">
            {% for day in days|reverse %}
            <div class="data_header">{{ day }}</div>
            {% endfor %}
            {% for day in days|reverse %}
            {% set corplist = collected.corpListByDate[day] %}
            <div>{% for item in corplist %}
                <div class="table_item"><a href="https://stockplus.com/m/stocks/KOREA-A{{ item.Code }}">{{ item.Name }}({{item.Code}})</a></div>{% endfor %}
            </div>
            {% endfor %}
        </section>
    </div>
</body>
</html>
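Lists.html only needs three context values: title, days, and a collected object exposing description and corpListByDate, whose rows carry Code and Name. A hedged sketch of rendering it into dist/, where the index page links to <name>.html; the helper name and the way the context is assembled here are assumptions for illustration, not code from this repository.

import pathlib
import jinja2

env = jinja2.Environment(loader=jinja2.FileSystemLoader("templates"))

def render_list_page(name, description, corp_list_by_date):
    # Context keys mirror what Lists.html reads: title, days, collected.description,
    # and collected.corpListByDate[day] (a list of rows with Code and Name).
    collected = {"description": description, "corpListByDate": corp_list_by_date}
    html = env.get_template("Lists.html").render(
        title=name,
        days=sorted(corp_list_by_date.keys()),
        collected=collected,
    )
    out = pathlib.Path("dist") / f"{name}.html"
    out.parent.mkdir(exist_ok=True)
    out.write_text(html, encoding="utf-8")

# render_list_page("sample", "sample description",
#                  {"2021-06-01": [{"Code": "005930", "Name": "삼성전자"}]})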
40
templates/index.html
Normal file
@ -0,0 +1,40 @@
<!DOCTYPE html>
<html lang="ko">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Stock</title>
    <style>
        body{
            font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
            background: linear-gradient(to right, #2b2b2b, #3d1844);
            color: #fff;
        }
        .container{
            display: grid;
            background: #f0f0f0;
            color: black;
            box-shadow: 0px 0px 5px 0px white;
            text-decoration: none;
        }
        .container a:link, a:visited{
            text-decoration: none;
            color: black;
            font-size: 40px;
        }
        .data_header{
            border-bottom: 1px solid #a7a7a7;
        }
    </style>
</head>
<body>
    <div style="margin: auto; max-width: 750px;">
        <h1>Main</h1>
        <div class="container">
            {% for p in pages %}
            <a href="/dist/{{p.name}}.html">{{p.name}}</a>
            {% endfor %}
        </div>
    </div>
</body>
</html>
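index.html only requires that each entry of pages expose a name matching a rendered file under dist/. A minimal sketch of such a registry; the class and the example names are placeholders chosen here purely for illustration.

from dataclasses import dataclass

@dataclass
class ListPage:
    name: str         # used for the link text and the /dist/<name>.html target
    description: str  # shown on the rendered list page

# Hypothetical registry in the spirit of the template's `pages` variable.
pages = [
    ListPage(name="sample-screen", description="placeholder screen"),
]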
972
test.ipynb
File diff suppressed because one or more lines are too long
@ -1,10 +0,0 @@
import { defineConfig, Preset } from "https://esm.sh/@twind/core@1.1.3";
import presetTailwind from "https://esm.sh/@twind/preset-tailwind@1.1.4";
import presetAutoprefix from "https://esm.sh/@twind/preset-autoprefix@1.0.7";

export default {
  ...defineConfig({
    presets: [presetTailwind() as Preset, presetAutoprefix()],
  }),
  selfURL: import.meta.url,
}
38
util/api.ts
@ -1,38 +0,0 @@
export interface Coperation {
  Name: string;
  Code: string;
  Sector: string;
  Product: string;
  ListingDay: string;
  ClosingMonth: string;
  Representative: string;
  Homepage: string;
  AddressArea: string;
  LastUpdate: string;
}

export interface PageCorpsInfo {
  name: string;
  description: string;
  corpListByDate: Record<string, Coperation[]>;
}

export interface CorpSimple {
  Code: string;
  Name: string;
}

export async function fetchPageInfo(pageName: string): Promise<PageCorpsInfo>{
  const res = await fetch("/api/pages/" + encodeURIComponent(pageName));
  return await res.json();
}

export async function fetchKospiList(): Promise<CorpSimple[]>{
  const res = await fetch("/api/kospi");
  return await res.json();
}

export async function fetchKosdaqList(): Promise<CorpSimple[]> {
  const res = await fetch("/api/kosdaq");
  return await res.json();
}
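fetchKospiList and fetchKosdaqList expect /api/kospi and /api/kosdaq to answer with a JSON array shaped like CorpSimple. A hedged Flask sketch of one such endpoint backed by the KOSPI CSV above; the route and its data source are assumptions for illustration only and are not part of the application shown in this diff.

import csv
import flask

app = flask.Flask(__name__)

@app.route("/api/kospi")
def kospi_list():
    # Returns [{"Code": "...", "Name": "..."}] — the shape CorpSimple describes.
    with open("static/kospi.csv", newline="", encoding="utf-8") as f:
        rows = [{"Code": code, "Name": name} for code, name in csv.reader(f)]
    return flask.jsonify(rows)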
35
util/util.ts
@ -1,35 +0,0 @@
import { Signal, useSignal } from "@preact/signals";
import { useEffect } from "preact/hooks";

export type QueryStatus<T> = {
  type: "loading";
} | {
  type: "complete";
  data: T;
} | {
  type: "error";
  err: Error;
};

export function useAsync<T>(fn: () => Promise<T>): Signal<QueryStatus<T>> {
  const state = useSignal({
    type: "loading",
  } as QueryStatus<T>);
  useEffect(() => {
    (async () => {
      try {
        const data = await fn();
        state.value = {
          type: "complete",
          data: data,
        };
      } catch (err) {
        state.value = {
          type: "error",
          err: err,
        };
      }
    })();
  }, []);
  return state;
}