gusti-adli committed on
Commit 0a3ce35
1 Parent(s): a59e1e1
Dockerfile CHANGED
@@ -1,36 +1,42 @@
 #Grab python=3.9-slim-buster image
 FROM python:3.9-slim-buster
 
-
-# Copy code to container
-COPY . .
-
 # RUN apt-get update
-RUN apt-get update && apt-get install -y build-essential wget python3-dev python3-pip gcc
+RUN apt-get update && apt-get install -y build-essential wget python3-dev python3-pip gcc default-jdk sudo
 # openldap-dev libffi-dev jpeg-dev zlib-dev libmemcached-dev gcc libc-dev g++ libxml2 libxslt libxslt-dev
 
 # TA-Lib
-RUN wget http://prdownloads.sourceforge.net/ta-lib/ta-lib-0.4.0-src.tar.gz && \
-    tar -xvzf ta-lib-0.4.0-src.tar.gz && \
+# RUN wget http://prdownloads.sourceforge.net/ta-lib/ta-lib-0.4.0-src.tar.gz
+COPY ta-lib-0.4.0-src.tar.gz ta-lib-0.4.0-src.tar.gz
+RUN tar -xvzf ta-lib-0.4.0-src.tar.gz && \
     cd ta-lib/ && \
-    ./configure --prefix=/usr && \
-    make && \
-    make install && \
+    ./configure --prefix=/usr --build=aarch64-unknown-linux-gnu && \
+    sudo make && \
+    sudo make install && \
     cd ..
 RUN rm -R ta-lib ta-lib-0.4.0-src.tar.gz
 
+# Copy requirements to container
+COPY app/requirements.txt /app/requirements.txt
+
 # Move working directory
 WORKDIR /app
 
 # Install python depencies
 RUN pip3 install --upgrade pip && pip3 install --no-cache-dir -q -r requirements.txt
 
-# Expose is NOT supported by Heroku
-# EXPOSE 5000
+# Copy code to container
+COPY app /app
+
+# Move working directory
+WORKDIR /app
+
+# Expose port 5000
+EXPOSE 5000
 
-# Run the image as a non-root user
-RUN adduser myuser
-USER myuser
+# # Run the image as a non-root user
+# RUN adduser myuser
+# USER myuser
 
-# Run the app. CMD is required to run on Heroku
-CMD gunicorn --bind 0.0.0.0:5000 app:app
+# Run the app on port 5000
+CMD gunicorn --bind 0.0.0.0:5000 app:app
app/assets/img/dummy.txt DELETED
File without changes
app/dataset/Klasifikasi Industri Perusahaan Tercatat.pdf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16d31e3ed82592828347ad047cea086300dc43b039518bfa1c1fd95069b7f12d
+size 1378039
app/dataset/ihsg.db ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d0f2b153c332b00988eb7446012ca760b7a59f4d419acb856ed8458fa404fa1d
+size 236658688
app/dataset/patterns.json ADDED
@@ -0,0 +1,63 @@
+{
+    "CDL2CROWS":"Two Crows",
+    "CDL3BLACKCROWS":"Three Black Crows",
+    "CDL3INSIDE":"Three Inside Up/Down",
+    "CDL3LINESTRIKE":"Three-Line Strike",
+    "CDL3OUTSIDE":"Three Outside Up/Down",
+    "CDL3STARSINSOUTH":"Three Stars In The South",
+    "CDL3WHITESOLDIERS":"Three Advancing White Soldiers",
+    "CDLABANDONEDBABY":"Abandoned Baby",
+    "CDLADVANCEBLOCK":"Advance Block",
+    "CDLBELTHOLD":"Belt-hold",
+    "CDLBREAKAWAY":"Breakaway",
+    "CDLCLOSINGMARUBOZU":"Closing Marubozu",
+    "CDLCONCEALBABYSWALL":"Concealing Baby Swallow",
+    "CDLCOUNTERATTACK":"Counterattack",
+    "CDLDARKCLOUDCOVER":"Dark Cloud Cover",
+    "CDLDOJI":"Doji",
+    "CDLDOJISTAR":"Doji Star",
+    "CDLDRAGONFLYDOJI":"Dragonfly Doji",
+    "CDLENGULFING":"Engulfing Pattern",
+    "CDLEVENINGDOJISTAR":"Evening Doji Star",
+    "CDLEVENINGSTAR":"Evening Star",
+    "CDLGAPSIDESIDEWHITE":"Up/Down-gap side-by-side white lines",
+    "CDLGRAVESTONEDOJI":"Gravestone Doji",
+    "CDLHAMMER":"Hammer",
+    "CDLHANGINGMAN":"Hanging Man",
+    "CDLHARAMI":"Harami Pattern",
+    "CDLHARAMICROSS":"Harami Cross Pattern",
+    "CDLHIGHWAVE":"High-Wave Candle",
+    "CDLHIKKAKE":"Hikkake Pattern",
+    "CDLHIKKAKEMOD":"Modified Hikkake Pattern",
+    "CDLHOMINGPIGEON":"Homing Pigeon",
+    "CDLIDENTICAL3CROWS":"Identical Three Crows",
+    "CDLINNECK":"In-Neck Pattern",
+    "CDLINVERTEDHAMMER":"Inverted Hammer",
+    "CDLKICKING":"Kicking",
+    "CDLKICKINGBYLENGTH":"Kicking - bull/bear determined by the longer marubozu",
+    "CDLLADDERBOTTOM":"Ladder Bottom",
+    "CDLLONGLEGGEDDOJI":"Long Legged Doji",
+    "CDLLONGLINE":"Long Line Candle",
+    "CDLMARUBOZU":"Marubozu",
+    "CDLMATCHINGLOW":"Matching Low",
+    "CDLMATHOLD":"Mat Hold",
+    "CDLMORNINGDOJISTAR":"Morning Doji Star",
+    "CDLMORNINGSTAR":"Morning Star",
+    "CDLONNECK":"On-Neck Pattern",
+    "CDLPIERCING":"Piercing Pattern",
+    "CDLRICKSHAWMAN":"Rickshaw Man",
+    "CDLRISEFALL3METHODS":"Rising/Falling Three Methods",
+    "CDLSEPARATINGLINES":"Separating Lines",
+    "CDLSHOOTINGSTAR":"Shooting Star",
+    "CDLSHORTLINE":"Short Line Candle",
+    "CDLSPINNINGTOP":"Spinning Top",
+    "CDLSTALLEDPATTERN":"Stalled Pattern",
+    "CDLSTICKSANDWICH":"Stick Sandwich",
+    "CDLTAKURI":"Takuri (Dragonfly Doji with very long lower shadow)",
+    "CDLTASUKIGAP":"Tasuki Gap",
+    "CDLTHRUSTING":"Thrusting Pattern",
+    "CDLTRISTAR":"Tristar Pattern",
+    "CDLUNIQUE3RIVER":"Unique 3 River",
+    "CDLUPSIDEGAP2CROWS":"Upside Gap Two Crows",
+    "CDLXSIDEGAP3METHODS":"Upside/Downside Gap Three Methods"
+}
app/dataset/pull_data.py ADDED
@@ -0,0 +1,149 @@
+import tabula
+import yfinance as yfi
+import sqlite3
+import pandas as pd
+import json
+import talib
+import time
+import datetime
+import warnings
+
+warnings.filterwarnings("ignore")
+
+with open("patterns.json", "r") as f:
+    patterns = json.load(f)
+
+update_time = datetime.datetime.now()
+# dummy update time
+
+def find_patterns(df):
+    result = pd.DataFrame(
+        columns=[
+            "Date",
+            "Kode",
+            "Pattern",
+            "Pattern_Score",
+            "Open_Close_Change",
+            "High_Low_Change",
+        ]
+    )
+    for attr, pattern in patterns.items():
+        scores = getattr(talib, attr)(df["Open"], df["High"], df["Low"], df["Close"])
+        mask = scores != 0
+        temp_result = df[mask]
+        if len(temp_result) > 0:
+            temp_result = temp_result.assign(
+                Open_Close_Change=(temp_result["Close"] - temp_result["Open"]) / temp_result["Open"],
+                High_Low_Change=(temp_result["High"] - temp_result["Low"]) / temp_result["Low"],
+                Pattern=[pattern] * len(temp_result),
+                Pattern_Score=scores[mask].values,
+            )[result.columns]
+            result = result.append(temp_result)
+    result = result.assign(time_updated = update_time)
+    return result
+
+
+def pull_data_yfi():
+    start = time.time()
+    with sqlite3.connect("ihsg.db") as con:
+        tickers = pd.read_sql(
+            """
+            SELECT Kode FROM list_perusahaan
+            WHERE Kode != "IHSG"
+            """,
+            con=con,
+        ).values.flatten()
+        ihsg = (
+            yfi.download("^JKSE", start="2017-01-01", end="2023-01-10", progress=False)
+            .reset_index()
+            .dropna()
+            .assign(Kode="IHSG")
+        )
+        ihsg = ihsg[["Date", "Kode", "Open", "High", "Low", "Close", "Volume"]]
+        ihsg = ihsg.assign(time_updated = update_time)
+        ihsg.to_sql("historical", if_exists="replace", con=con, index=False)
+        pattern_search = find_patterns(ihsg)
+        pattern_search.to_sql("patterns", if_exists="replace", con=con, index=False)
+        print("INSERTION RESULT: \n")
+        print(pd.read_sql("SELECT * FROM historical", con=con).tail(10))
+        print(pd.read_sql("SELECT * FROM historical", con=con).shape)
+        print("\n\n*--\n")
+        for i in range(0, len(tickers), 50):
+            ticker = [f"{kode}.JK" for kode in tickers[i : i + 50]]
+            df = (
+                yfi.download(ticker, start="2017-01-01", end="2023-01-10", progress=False)
+                .T.unstack(level=1)
+                .T.reset_index()
+                .dropna()
+                .rename(columns={"level_1": "Kode"})
+            )
+            df = df[["Date", "Kode", "Open", "High", "Low", "Close", "Volume"]]
+            df["Kode"] = df["Kode"].str.replace(".JK", "")
+            for j, kode in enumerate(df["Kode"].unique()):
+                print(f"Finding Patterns for {kode} #{i+j+1}\t\t time elapsed = {time.time() - start:.2f} s")
+                pattern_search = find_patterns(df[df["Kode"] == kode])
+                pattern_search.to_sql("patterns", if_exists="append", con=con, index=False)
+            df = df.assign(time_updated = update_time)
+            df.to_sql("historical", if_exists="append", con=con, index=False)
+            print("INSERTION RESULT: \n")
+            print(pd.read_sql("SELECT * FROM historical", con=con).tail(10))
+            print(pd.read_sql("SELECT * FROM historical", con=con).shape)
+            print("\n\n*--\n")
+            time.sleep(60)
+        con.commit()
+
+def pull_data_klasifikasi_industri():
+    with sqlite3.connect("ihsg.db") as con:
+        cur = con.cursor()
+        cur.execute("DROP TABLE IF EXISTS list_perusahaan")
+        cur.execute("""
+            CREATE TABLE list_perusahaan (
+                Kode VARCHAR(4),
+                Nama TEXT,
+                Sektor TEXT,
+                Instrumen TEXT)
+        """)
+        cur.execute("""
+            INSERT INTO list_perusahaan VALUES
+            ('IHSG', 'Indeks Harga Saham Gabungan', NULL, 'Indeks')
+        """)
+        # TODO: Change Schema from Star Schema to Snowflake Schema
+        # list_perusahaan table will be the dimension table for sector and sub-sector fact tables
+        # note: list_perusahaan table is a dimension table for historical fact table
+
+        dfs = tabula.read_pdf("Klasifikasi Industri Perusahaan Tercatat.pdf", pages="all", stream=True)
+        # print(len(dfs))
+        for df in dfs:
+            kode, nama, sektor = None, None, None
+            for row in df.iloc[2:,:].itertuples():
+                if kode is not None and pd.notna(row[2]):
+                    cur.execute(f"""
+                        INSERT INTO list_perusahaan VALUES
+                        ('{kode}', '{nama}', '{sektor}', 'Saham')
+                    """)
+                    kode, nama, sektor = None, None, None
+                elif kode is not None and pd.isna(row[2]):
+                    if pd.notna(row[3]):
+                        nama += " " + row[3]
+                    if pd.notna(row[5]):
+                        sektor += " " + row[5]
+                if kode is None and nama is None and sektor is None and pd.notna(row[2]):
+                    if "saham" in row[8].lower():
+                        kode = row[2]
+                        nama = row[3]
+                        sektor = row[5]
+            else:
+                if kode is not None:
+                    cur.execute(f"""
+                        INSERT INTO list_perusahaan VALUES
+                        ('{kode}', '{nama}', '{sektor}', 'Saham')
+                    """)
+        print("INSERTION RESULT: \n")
+        print(pd.read_sql("SELECT * FROM list_perusahaan", con=con).tail(10))
+        print(pd.read_sql("SELECT * FROM list_perusahaan", con=con).shape)
+        print("\n\n*--\n")
+        con.commit()
+
+if __name__ == "__main__":
+    pull_data_klasifikasi_industri()
+    pull_data_yfi()
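
Note (not part of the commit): find_patterns above drives everything through getattr dispatch on the keys of patterns.json, each of which names a TA-Lib candlestick function. A minimal, self-contained sketch of that mechanism, assuming patterns.json is in the working directory and using made-up OHLC values:

# Illustrative sketch only; the OHLC numbers are invented.
import json

import pandas as pd
import talib

with open("patterns.json") as f:
    patterns = json.load(f)

ohlc = pd.DataFrame({
    "Open":  [100.0, 102.0, 101.0, 103.0, 104.0],
    "High":  [103.0, 104.0, 103.0, 106.0, 105.0],
    "Low":   [ 99.0, 101.0, 100.0, 102.0, 103.0],
    "Close": [102.0, 101.0, 103.0, 105.0, 104.0],
})

for attr, name in patterns.items():
    # each key resolves to a TA-Lib function, e.g. talib.CDLDOJI
    scores = getattr(talib, attr)(ohlc["Open"], ohlc["High"], ohlc["Low"], ohlc["Close"])
    hits = scores[scores != 0]
    if len(hits):
        print(f"{name}: rows {list(hits.index)}, scores {list(hits.values)}")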
app/helper_script.py ADDED
@@ -0,0 +1,176 @@
+from bokeh.plotting import figure
+from bokeh.models import ColumnDataSource, HoverTool, Arrow, NormalHead
+from bokeh.palettes import Spectral4
+from bokeh.embed import components
+import sqlite3
+import pandas as pd
+
+
+def get_tickers(pattern, last_dates=1):
+    # connect to database
+    with sqlite3.connect("dataset/ihsg.db") as con:
+
+        # retrieve data from database
+        tickers = pd.read_sql(f"""
+            SELECT Kode
+            FROM patterns
+            WHERE Date IN (
+                SELECT Date
+                FROM (
+                    SELECT Date, ROW_NUMBER() OVER(ORDER BY Date DESC) AS rnk
+                    FROM historical
+                    WHERE Kode = 'IHSG'
+                ) a
+                WHERE rnk <= {last_dates + 1}
+            )
+            AND Pattern = '{pattern}'
+            ORDER BY Pattern_Score DESC, Open_Close_Change DESC, High_Low_Change DESC
+            """,
+            con=con,
+        ).iloc[:, 0].to_list()
+
+    return tickers
+
+def get_data(kode, pattern):
+
+    # connect to database
+    with sqlite3.connect("dataset/ihsg.db") as con:
+
+        # retrieve data from database
+        df = pd.read_sql(f"""
+            SELECT *
+            FROM historical
+            WHERE Kode = '{kode}'
+            ORDER BY Date
+            """,
+            con=con,
+            parse_dates=['Date'],
+        )
+
+        # df = pd.read_sql(f"""
+        #     SELECT
+        #         historical.Date,
+        #         historical.Open,
+        #         historical.High,
+        #         historical.Low,
+        #         historical.Close,
+        #         patterns.Pattern_Score
+        #     FROM historical
+        #     LEFT JOIN (
+        #         SELECT Date, Kode, Pattern_Score
+        #         FROM patterns
+        #         WHERE Pattern = '{pattern}'
+        #     ) AS patterns
+        #     USING(Kode, Date)
+        #     WHERE Kode = '{kode}'
+        #     ORDER BY Date
+        #     """,
+        #     con=con,
+        #     parse_dates=['Date'],
+        # )
+
+        nama = pd.read_sql(
+            f"SELECT Nama FROM list_perusahaan WHERE Kode = '{kode}'",
+            con=con,
+        ).values[0][0]
+
+    return df, nama
+
+def plot_candlestick(df, nama, kode):
+
+    # calculate simple moving average
+    for period in [5,20,200]:
+        df[f'sma{period}'] = df['Close'].rolling(period, period).mean()
+
+    # Prepare data for plotting
+    cds = ColumnDataSource(df)
+    cds_inc = ColumnDataSource(df[df["Close"] >= df["Open"]])
+    cds_dec = ColumnDataSource(df[df["Open"] > df["Close"]])
+
+    # assign figure canvas to variable p
+    x_range = (max(len(df) - 60.5, 0), len(df))
+    p = figure(
+        tools="pan,zoom_in,zoom_out,box_zoom,undo,redo,reset,save",
+        plot_width=600,
+        plot_height=400,
+        title = f"{kode}\t({nama})",
+        x_range= x_range,
+        y_range= (
+            df.loc[x_range[0]//1-5:x_range[1], ["Open", "High", "Low", "Close", "sma5", "sma20", "sma200"]].min().min() * 0.875,
+            df.loc[x_range[0]//1-5:x_range[1], ["Open", "High", "Low", "Close", "sma5", "sma20", "sma200"]].max().max() * 1.125
+        )
+    )
+
+    # xaxis setup
+    p.xaxis.major_label_overrides = {
+        i: date.strftime('%d %b %Y') for i, date in enumerate(df["Date"])
+    }
+    p.xaxis.bounds = (0, df.index[-1])
+    p.xaxis.major_label_orientation = (22/7)/4
+    p.grid.grid_line_alpha=0.3
+
+    # # plot pattern arrow
+    # for idx in df[df["Pattern_Score"].notna()].tail().index:
+    #     row = df.loc[idx, ["Open", "High", "Low", "Close"]]
+    #     x_start = row.min()
+    #     if x_start < 200:
+    #         x_start -= 2
+    #         x_end = x_start - 4
+    #     elif x_start < 500:
+    #         x_start -= 4
+    #         x_end = x_start - 4
+    #     else:
+    #         x_start -= 8
+    #         x_end = x_start - 6
+    #     p.add_layout(Arrow(
+    #         end=NormalHead(fill_color="black"),
+    #         line_color="black",
+    #         x_start = x_start,
+    #         x_end = x_end,
+    #         y_start = idx,
+    #         y_end=idx
+    #     ))
+
+
+    # plot candlestick wicks with HoverTool
+    p.add_tools(HoverTool(
+        renderers=[p.segment("index", "High", "index", "Low", source=cds, color="black", line_width=1)],
+        tooltips=[
+            ("Date","@Date{%F}"),
+            ("Open","@Open{0.2f}"),
+            ("High", "@High{0.2f}"),
+            ("Low", "@Low{0.2f}"),
+            ("Close", "@Close{0.2f}"),
+        ],
+        formatters={"@Date":"datetime"}
+    ))
+
+    # plot candlestick bars
+    for data, color in [(cds_inc, "#26a69a"), (cds_dec, "#ef5350")]:
+        p.vbar("index", 0.5, "Open", "Close", source=data, fill_color=color, line_color="black", line_width=1)
+
+    # plot moving average with HoverTool
+    for period, color in zip([5,20,200], Spectral4):
+        p.add_tools(HoverTool(
+            renderers=[p.line(
+                "index",
+                f"sma{period}",
+                source=cds,
+                line_width=2,
+                alpha=0.8,
+                color=color,
+                legend_label=f'SMA {period}\t')],
+            tooltips=[
+                (f"SMA {period}", "@sma%s{0.2f}" %(period)),
+            ],
+        ))
+
+    # legend setup
+    p.legend.location = "top_left"
+    p.legend.click_policy="hide"
+    p.legend.orientation="horizontal"
+
+    # generate script and div
+    script, div = components(p)
+
+    return script, div
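
Note (not part of the commit): the three helpers above can also be exercised outside Flask. A minimal sketch that renders one chart to a standalone HTML file; the pattern name ("Doji"), the output file name, and the use of Bokeh's CDN resources are arbitrary choices for the example, and it assumes it is run from the app/ directory so that dataset/ihsg.db resolves:

# Illustrative sketch only; run from the app/ directory.
from bokeh.resources import CDN

from helper_script import get_data, get_tickers, plot_candlestick

tickers = get_tickers("Doji", last_dates=3)
if tickers:
    kode = tickers[0]
    df, nama = get_data(kode, "Doji")
    script, div = plot_candlestick(df, nama, kode)
    page = (
        "<html><head>"
        + CDN.render_js() + CDN.render_css() + script
        + "</head><body>" + div + "</body></html>"
    )
    with open("doji_example.html", "w") as f:  # hypothetical output file
        f.write(page)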
app/requirements.txt ADDED
@@ -0,0 +1,7 @@
+# tabula-py==2.6.0
+Flask==2.1.0
+gunicorn==20.1.0
+pandas>=1.2.5
+yfinance==0.2.3
+TA-Lib==0.4.20
+bokeh==2.3.2
app/templates/index.html ADDED
@@ -0,0 +1,52 @@
+<html lang="en">
+    <head>
+        <meta charset="UTF-8">
+        <meta http-equiv="X-UA-Compatible" content="IE=edge">
+        <meta name="viewport" content="width=device-width, initial-scale=1.0">
+        <link rel="stylesheet" href="./assets/css/styles.css" type="text/css">
+        <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.5.1/jquery.min.js"></script>
+        <script type="text/javascript">
+            window.onload = function() {
+                $("#loading").hide();
+            };
+            function loading(){
+                $("#content").hide();
+                $("#loading").show();
+            }
+        </script>
+        {% block head %}{% endblock %}
+        <title>Candlestick Screener</title>
+    </head>
+    <body>
+        <div id="loading">
+            <!-- <img src="/assets/img/loading.gif"> -->
+            <img src="https://cdn.dribbble.com/users/115601/screenshots/5356365/loading.gif">
+        </div>
+        <div id="content">
+            <h1>Candlestick Screener</h1>
+            <form>
+                <label for="pattern">Find pattern:</label>
+                <select name="pattern">
+                    {% if not selected %}
+                    <option value="" selected disabled hidden></option>
+                    {% endif %}
+                    {% for pattern in patterns %}
+                    {% if selected == pattern %}
+                    <option value={{ pattern }} selected>{{ patterns[pattern] }}</option>
+                    {% else %}
+                    <option value={{ pattern }}>{{ patterns[pattern] }}</option>
+                    {% endif %}
+                    {% endfor %}
+                </select>
+                <input type="submit" value="search" onclick="loading();"/>
+            </form>
+            <form method="POST">
+                Database updated at: {{ last_update_time }}
+                <button type="submit" value="Update Database" onclick="loading();">
+                    Update Database
+                </button>
+            </form>
+            {% block contents %}{% endblock %}
+        </div>
+    </body>
+</html>
app/templates/no_pattern_found.html ADDED
@@ -0,0 +1,7 @@
+{% extends "index.html" %}
+
+{% block contents %}
+
+<h3>Candlestick pattern is not present in the last candlesticks for all stocks</h3>
+
+{% endblock %}
app/templates/plot.html ADDED
@@ -0,0 +1,15 @@
+{% extends "index.html" %}
+
+{% block head %}
+    {{ js_resources|indent(4)|safe }}
+    {{ css_resources|indent(4)|safe }}
+    {% for plot_script in plot_scripts %}
+    {{ plot_script|indent(4)|safe }}
+    {% endfor %}
+{% endblock %}
+
+{% block contents %}
+    {% for plot_div in plot_divs %}
+    {{ plot_div | safe }}
+    {% endfor %}
+{% endblock %}
app/update_database.py ADDED
@@ -0,0 +1,116 @@
+import pandas as pd
+import sqlite3
+import json
+import datetime
+import time
+import yfinance as yfi
+import talib
+import warnings
+warnings.filterwarnings("ignore")
+
+# retrieve pattern data
+with open("dataset/patterns.json") as f:
+    patterns = json.load(f)
+
+
+def get_last_update_time():
+    with sqlite3.connect("dataset/ihsg.db") as con:
+        return pd.read_sql("SELECT MAX(time_updated) FROM historical", con=con).values[0][0][:19]
+
+def find_patterns(df):
+    result = pd.DataFrame(
+        columns=[
+            "Date",
+            "Kode",
+            "Pattern",
+            "Pattern_Score",
+            "Open_Close_Change",
+            "High_Low_Change",
+        ]
+    )
+    for attr, pattern in patterns.items():
+        scores = getattr(talib, attr)(df["Open"], df["High"], df["Low"], df["Close"])
+        mask = scores != 0
+        temp_result = df[mask]
+        if len(temp_result) > 0:
+            temp_result = temp_result.assign(
+                Open_Close_Change=(temp_result["Close"] - temp_result["Open"]) / temp_result["Open"],
+                High_Low_Change=(temp_result["High"] - temp_result["Low"]) / temp_result["Low"],
+                Pattern=[pattern] * len(temp_result),
+                Pattern_Score=scores[mask].values,
+            )[result.columns]
+            result = result.append(temp_result)
+    result = result.assign(time_updated = datetime.datetime.now())
+    return result
+
+
+def update_database():
+
+    with sqlite3.connect("dataset/ihsg.db") as con:
+        start_date = datetime.datetime.strptime(
+            pd.read_sql("SELECT MAX(Date) FROM historical", con=con).values[0][0],
+            "%Y-%m-%d %H:%M:%S"
+        )
+        start_date += pd.offsets.DateOffset(days=1)
+        start_date = datetime.datetime.strftime(start_date, "%Y-%m-%d")
+        end_date = datetime.datetime.now()
+        if (end_date.hour) < 15:
+            end_date -= pd.offsets.DateOffset(days = 1)
+        end_date = datetime.datetime.strftime(end_date, "%Y-%m-%d")
+        ihsg = (
+            yfi.download("^JKSE", start=start_date, end=end_date, progress=False)
+            .dropna()
+        )[start_date:end_date]
+        print(f"New Data IHSG {start_date}-{end_date}\n", len(ihsg), " rows\t")
+        if len(ihsg) > 0:
+            print(ihsg)
+            ihsg = (
+                ihsg.assign(
+                    Kode="IHSG",
+                    time_updated = datetime.datetime.now(),
+                )
+                .reset_index()
+            )[["Date", "Kode", "Open", "High", "Low", "Close", "Volume", "time_updated"]]
+            ihsg.to_sql("historical", if_exists="append", con=con, index=False)
+        tickers = pd.read_sql(
+            """
+            SELECT DISTINCT Kode FROM historical
+            WHERE Kode != "IHSG"
+            """,
+            con=con,
+        ).iloc[:,0].to_list()
+        print("UPDATING historical TABLE..")
+        for i in range(0, len(tickers), 50):
+            ticker = [f"{kode}.JK" for kode in tickers[i : i + 50]]
+            df = (
+                yfi.download(ticker, start=start_date, end=end_date, progress=False)
+                .T.unstack(level=1)
+                .T.reset_index()
+                .dropna()
+                .rename(columns={"level_1": "Kode"})
+            )[["Date", "Kode", "Open", "High", "Low", "Close", "Volume"]]
+            df["Kode"] = df["Kode"].str.replace(".JK", "")
+            df = df.assign(time_updated = datetime.datetime.now())
+            df.to_sql("historical", if_exists="append", con=con, index=False)
+
+
+        # update patterns database
+        tickers = ["IHSG"] + tickers
+        start = time.time()
+        for i, kode in enumerate(tickers):
+            print(f"Finding Patterns for {kode} #{i+1}\t\t time elapsed = {time.time() - start:.2f} s")
+            try:
+                search_result = find_patterns(df=pd.read_sql(f"""
+                    SELECT *
+                    FROM historical
+                    WHERE Kode = '{kode}'
+                    ORDER BY Date
+                    """,
+                    con=con,
+                ))
+                if i == 0:
+                    search_result.to_sql("patterns", if_exists="replace", con=con, index=False)
+                else:
+                    search_result.to_sql("patterns", if_exists="append", con=con, index=False)
+            except:
+                pass
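
Note (not part of the commit): app.py, the module behind gunicorn's app:app entry point in the Dockerfile, is not shown in this diff. A non-authoritative sketch of how the templates, helper_script.py, and update_database.py could be wired together in Flask; the route layout, the ten-chart cap, and the use of Bokeh's CDN resources are assumptions:

# Hypothetical app.py wiring; names and limits are guesses, not the commit's code.
import json

from bokeh.resources import CDN
from flask import Flask, render_template, request

from helper_script import get_data, get_tickers, plot_candlestick
from update_database import get_last_update_time, update_database

app = Flask(__name__)

with open("dataset/patterns.json") as f:
    patterns = json.load(f)


@app.route("/", methods=["GET", "POST"])
def index():
    if request.method == "POST":          # "Update Database" button in index.html
        update_database()

    key = request.args.get("pattern")     # e.g. "CDLDOJI" from the <select>
    common = dict(patterns=patterns, last_update_time=get_last_update_time())
    if not key:
        return render_template("index.html", **common)

    tickers = get_tickers(patterns[key])  # patterns table stores the long pattern name
    if not tickers:
        return render_template("no_pattern_found.html", selected=key, **common)

    scripts, divs = [], []
    for kode in tickers[:10]:             # cap the number of charts per page
        df, nama = get_data(kode, patterns[key])
        script, div = plot_candlestick(df, nama, kode)
        scripts.append(script)
        divs.append(div)
    return render_template(
        "plot.html",
        selected=key,
        js_resources=CDN.render_js(),
        css_resources=CDN.render_css(),
        plot_scripts=scripts,
        plot_divs=divs,
        **common,
    )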
ta-lib-0.4.0-src.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9ff41efcb1c011a4b4b6dfc91610b06e39b1d7973ed5d4dee55029a0ac4dc651
+size 1330299