vdwow committed on
Commit
b808e59
1 Parent(s): 636212b

Upload 3 files


Add ChatGPT URL import and a static location comparison in expert mode
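For context, the new "Evaluate your own usage" tab boils down to: fetch a shared ChatGPT conversation page, extract the message text, count tokens with tiktoken's cl100k_base encoding, and feed that count to ecologits. Below is a minimal sketch of that flow, simplified from the diff (the share URL is a placeholder and the HTML extraction is a stripped-down version of what `process_input` in app.py does):

```python
# Hedged sketch of the new usage-estimation flow (not the exact app.py code):
# the share URL is a placeholder, and the HTML parsing is a simplified version
# of the 'parts":["' splitting done in process_input().
import requests
import tiktoken
from bs4 import BeautifulSoup
from ecologits.tracers.utils import compute_llm_impacts

tokenizer = tiktoken.get_encoding("cl100k_base")

def count_conversation_tokens(share_url: str) -> int:
    """Download a shared ChatGPT conversation and count its tokens."""
    html = requests.get(share_url).text
    soup = BeautifulSoup(html, "html.parser")
    # Message contents are embedded in the page source after 'parts":["' markers.
    chunks = str(soup).split('parts":["')
    text = "".join(chunk.split('"]')[0] for chunk in chunks[1:])
    return len(tokenizer.encode(text, disallowed_special=()))

tokens = count_conversation_tokens("https://chatgpt.com/share/<conversation-id>")
impacts = compute_llm_impacts(
    provider="openai",
    model_name="gpt-4o",
    output_token_count=tokens,
    request_latency=100000,  # same fixed latency used throughout app.py
)
print(impacts)  # exposes energy, gwp, adpe and pe, as used in the UI
```

The static location comparison added to Expert Mode, meanwhile, is a fixed matplotlib bar chart of per-kWh GHG intensity for a handful of countries.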

Files changed (3)
  1. app.py +562 -406
  2. requirements-dev.txt +6 -3
  3. requirements.txt +5 -2
app.py CHANGED
@@ -1,406 +1,562 @@
1
- import gradio as gr
2
-
3
- from ecologits.tracers.utils import compute_llm_impacts, _avg
4
- from ecologits.impacts.llm import compute_llm_impacts as compute_llm_impacts_expert
5
- from ecologits.impacts.llm import IF_ELECTRICITY_MIX_GWP, IF_ELECTRICITY_MIX_ADPE, IF_ELECTRICITY_MIX_PE
6
- from ecologits.model_repository import models
7
-
8
- from src.assets import custom_css
9
- from src.electricity_mix import COUNTRY_CODES, find_electricity_mix
10
- from src.content import (
11
- HERO_TEXT,
12
- ABOUT_TEXT,
13
- CITATION_LABEL,
14
- CITATION_TEXT,
15
- LICENCE_TEXT, METHODOLOGY_TEXT
16
- )
17
- from src.constants import (
18
- PROVIDERS,
19
- OPENAI_MODELS,
20
- ANTHROPIC_MODELS,
21
- COHERE_MODELS,
22
- META_MODELS,
23
- MISTRALAI_MODELS,
24
- PROMPTS,
25
- CLOSED_SOURCE_MODELS,
26
- MODELS,
27
- )
28
- from src.utils import (
29
- format_impacts,
30
- format_energy_eq_physical_activity,
31
- PhysicalActivity,
32
- format_energy_eq_electric_vehicle,
33
- format_gwp_eq_streaming, format_energy_eq_electricity_production, EnergyProduction,
34
- format_gwp_eq_airplane_paris_nyc, format_energy_eq_electricity_consumption_ireland
35
- )
36
-
37
- CUSTOM = "Custom"
38
-
39
-
40
- def model_list(provider: str) -> gr.Dropdown:
41
- if provider == "openai":
42
- return gr.Dropdown(
43
- OPENAI_MODELS,
44
- label="Model",
45
- value=OPENAI_MODELS[0][1],
46
- filterable=True,
47
- )
48
- elif provider == "anthropic":
49
- return gr.Dropdown(
50
- ANTHROPIC_MODELS,
51
- label="Model",
52
- value=ANTHROPIC_MODELS[0][1],
53
- filterable=True,
54
- )
55
- elif provider == "cohere":
56
- return gr.Dropdown(
57
- COHERE_MODELS,
58
- label="Model",
59
- value=COHERE_MODELS[0][1],
60
- filterable=True,
61
- )
62
- elif provider == "huggingface_hub/meta":
63
- return gr.Dropdown(
64
- META_MODELS,
65
- label="Model",
66
- value=META_MODELS[0][1],
67
- filterable=True,
68
- )
69
- elif provider == "mistralai":
70
- return gr.Dropdown(
71
- MISTRALAI_MODELS,
72
- label="Model",
73
- value=MISTRALAI_MODELS[0][1],
74
- filterable=True,
75
- )
76
-
77
-
78
- def custom():
79
- return CUSTOM
80
-
81
-
82
- def model_active_params_fn(model_name: str, n_param: float):
83
- if model_name == CUSTOM:
84
- return n_param
85
- provider, model_name = model_name.split('/', 1)
86
- model = models.find_model(provider=provider, model_name=model_name)
87
- return model.active_parameters or _avg(model.active_parameters_range)
88
-
89
-
90
- def model_total_params_fn(model_name: str, n_param: float):
91
- if model_name == CUSTOM:
92
- return n_param
93
- provider, model_name = model_name.split('/', 1)
94
- model = models.find_model(provider=provider, model_name=model_name)
95
- return model.total_parameters or _avg(model.total_parameters_range)
96
-
97
-
98
- def mix_fn(country_code: str, mix_adpe: float, mix_pe: float, mix_gwp: float):
99
- if country_code == CUSTOM:
100
- return mix_adpe, mix_pe, mix_gwp
101
- return find_electricity_mix(country_code)
102
-
103
-
104
- with gr.Blocks(css=custom_css) as demo:
105
- gr.Markdown(HERO_TEXT)
106
-
107
- with gr.Tab("🧮 Calculator"):
108
- with gr.Row():
109
- gr.Markdown("# Estimate the environmental impacts of LLM inference")
110
- with gr.Row():
111
- input_provider = gr.Dropdown(
112
- PROVIDERS,
113
- label="Provider",
114
- value=PROVIDERS[0][1],
115
- filterable=True,
116
- )
117
-
118
- input_model = gr.Dropdown(
119
- OPENAI_MODELS,
120
- label="Model",
121
- value=OPENAI_MODELS[0][1],
122
- filterable=True,
123
- )
124
- input_provider.change(model_list, input_provider, input_model)
125
-
126
- input_prompt = gr.Dropdown(
127
- PROMPTS,
128
- label="Example prompt",
129
- value=400,
130
- )
131
-
132
-
133
- @gr.render(inputs=[input_provider, input_model, input_prompt])
134
- def render_simple(provider, model, prompt):
135
- if provider.startswith("huggingface_hub"):
136
- provider = provider.split("/")[0]
137
- if models.find_model(provider, model) is not None:
138
- impacts = compute_llm_impacts(
139
- provider=provider,
140
- model_name=model,
141
- output_token_count=prompt,
142
- request_latency=100000
143
- )
144
- impacts = format_impacts(impacts)
145
-
146
- # Inference impacts
147
- with gr.Blocks():
148
- if f"{provider}/{model}" in CLOSED_SOURCE_MODELS:
149
- with gr.Row():
150
- gr.Markdown("""<p> ⚠️ You have selected a closed-source model. Please be aware that
151
- some providers do not fully disclose information about such models. Consequently, our
152
- estimates have a lower precision for closed-source models. For further details, refer to
153
- our FAQ in the About section.
154
- </p>""", elem_classes="warning-box")
155
-
156
- with gr.Row():
157
- gr.Markdown("""
158
- ## Environmental impacts
159
-
160
- To understand how the environmental impacts are computed go to the 📖 Methodology tab.
161
- """)
162
- with gr.Row():
163
- with gr.Column(scale=1, min_width=220):
164
- gr.Markdown(f"""
165
- <h2 align="center">⚡️ Energy</h2>
166
- $$ \Large {impacts.energy.magnitude:.3g} \ \large {impacts.energy.units} $$
167
- <p align="center"><i>Evaluates the electricity consumption<i></p><br>
168
- """)
169
- with gr.Column(scale=1, min_width=220):
170
- gr.Markdown(f"""
171
- <h2 align="center">🌍️ GHG Emissions</h2>
172
- $$ \Large {impacts.gwp.magnitude:.3g} \ \large {impacts.gwp.units} $$
173
- <p align="center"><i>Evaluates the effect on global warming<i></p><br>
174
- """)
175
- with gr.Column(scale=1, min_width=220):
176
- gr.Markdown(f"""
177
- <h2 align="center">🪨 Abiotic Resources</h2>
178
- $$ \Large {impacts.adpe.magnitude:.3g} \ \large {impacts.adpe.units} $$
179
- <p align="center"><i>Evaluates the use of metals and minerals<i></p><br>
180
- """)
181
- with gr.Column(scale=1, min_width=220):
182
- gr.Markdown(f"""
183
- <h2 align="center">⛽️ Primary Energy</h2>
184
- $$ \Large {impacts.pe.magnitude:.3g} \ \large {impacts.pe.units} $$
185
- <p align="center"><i>Evaluates the use of energy resources<i></p><br>
186
- """)
187
-
188
- # Impacts equivalents
189
- with gr.Blocks():
190
- with gr.Row():
191
- gr.Markdown("""
192
- ---
193
- ## That's equivalent to...
194
-
195
- Making this request to the LLM is equivalent to the following actions.
196
- """)
197
- with gr.Row():
198
- physical_activity, distance = format_energy_eq_physical_activity(impacts.energy)
199
- if physical_activity == PhysicalActivity.WALKING:
200
- physical_activity = "🚶 " + physical_activity.capitalize()
201
- if physical_activity == PhysicalActivity.RUNNING:
202
- physical_activity = "🏃 " + physical_activity.capitalize()
203
- with gr.Column(scale=1, min_width=300):
204
- gr.Markdown(f"""
205
- <h2 align="center">{physical_activity} $$ \Large {distance.magnitude:.3g}\ {distance.units} $$ </h2>
206
- <p align="center"><i>Based on energy consumption<i></p><br>
207
- """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
208
-
209
- ev_eq = format_energy_eq_electric_vehicle(impacts.energy)
210
- with gr.Column(scale=1, min_width=300):
211
- gr.Markdown(f"""
212
- <h2 align="center">🔋 Electric Vehicle $$ \Large {ev_eq.magnitude:.3g}\ {ev_eq.units} $$ </h2>
213
- <p align="center"><i>Based on energy consumption<i></p><br>
214
- """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
215
-
216
- streaming_eq = format_gwp_eq_streaming(impacts.gwp)
217
- with gr.Column(scale=1, min_width=300):
218
- gr.Markdown(f"""
219
- <h2 align="center">⏯️ Streaming $$ \Large {streaming_eq.magnitude:.3g}\ {streaming_eq.units} $$ </h2>
220
- <p align="center"><i>Based on GHG emissions<i></p><br>
221
- """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
222
-
223
- # Bigger scale impacts equivalent
224
- with gr.Blocks():
225
- with gr.Row():
226
- gr.Markdown("""
227
- ## What if 1% of the planet does this request everyday for 1 year?
228
-
229
- If this use case is largely deployed around the world the equivalent impacts would be. (The
230
- impacts of this request x 1% of 8 billion people x 365 days in a year.)
231
- """)
232
- with gr.Row():
233
- electricity_production, count = format_energy_eq_electricity_production(impacts.energy)
234
- if electricity_production == EnergyProduction.NUCLEAR:
235
- emoji = "☢️"
236
- name = "Nuclear power plants"
237
- if electricity_production == EnergyProduction.WIND:
238
- emoji = "💨️ "
239
- name = "Wind turbines"
240
- with gr.Column(scale=1, min_width=300):
241
- gr.Markdown(f"""
242
- <h2 align="center">{emoji} $$ \Large {count.magnitude:.0f} $$ {name} <span style="font-size: 12px">(yearly)</span></h2>
243
- <p align="center"><i>Based on electricity consumption<i></p><br>
244
- """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
245
-
246
- ireland_count = format_energy_eq_electricity_consumption_ireland(impacts.energy)
247
- with gr.Column(scale=1, min_width=300):
248
- gr.Markdown(f"""
249
- <h2 align="center">🇮🇪 $$ \Large {ireland_count.magnitude:.2g} $$ x Ireland <span style="font-size: 12px">(yearly ⚡️ cons.)</span></h2>
250
- <p align="center"><i>Based on electricity consumption<i></p><br>
251
- """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
252
-
253
- paris_nyc_airplane = format_gwp_eq_airplane_paris_nyc(impacts.gwp)
254
- with gr.Column(scale=1, min_width=300):
255
- gr.Markdown(f"""
256
- <h2 align="center">✈️ $$ \Large {paris_nyc_airplane.magnitude:,.0f} $$ Paris NYC </h2>
257
- <p align="center"><i>Based on GHG emissions<i></p><br>
258
- """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
259
-
260
- with gr.Tab("🤓 Expert Mode"):
261
- with gr.Row():
262
- gr.Markdown("# 🤓 Expert mode")
263
- model = gr.Dropdown(
264
- MODELS + [CUSTOM],
265
- label="Model name",
266
- value="openai/gpt-3.5-turbo",
267
- filterable=True,
268
- interactive=True
269
- )
270
- input_model_active_params = gr.Number(
271
- label="Number of billions of active parameters",
272
- value=45.0,
273
- interactive=True
274
- )
275
- input_model_total_params = gr.Number(
276
- label="Number of billions of total parameters",
277
- value=45.0,
278
- interactive=True
279
- )
280
-
281
- model.change(fn=model_active_params_fn,
282
- inputs=[model, input_model_active_params],
283
- outputs=[input_model_active_params])
284
- model.change(fn=model_total_params_fn,
285
- inputs=[model, input_model_total_params],
286
- outputs=[input_model_total_params])
287
- input_model_active_params.input(fn=custom, outputs=[model])
288
- input_model_total_params.input(fn=custom, outputs=[model])
289
-
290
- input_tokens = gr.Number(
291
- label="Output tokens",
292
- value=100
293
- )
294
-
295
- mix = gr.Dropdown(
296
- COUNTRY_CODES + [CUSTOM],
297
- label="Location",
298
- value="WOR",
299
- filterable=True,
300
- interactive=True
301
- )
302
- input_mix_gwp = gr.Number(
303
- label="Electricity mix - GHG emissions [kgCO2eq / kWh]",
304
- value=IF_ELECTRICITY_MIX_GWP,
305
- interactive=True
306
- )
307
- input_mix_adpe = gr.Number(
308
- label="Electricity mix - Abiotic resources [kgSbeq / kWh]",
309
- value=IF_ELECTRICITY_MIX_ADPE,
310
- interactive=True
311
- )
312
- input_mix_pe = gr.Number(
313
- label="Electricity mix - Primary energy [MJ / kWh]",
314
- value=IF_ELECTRICITY_MIX_PE,
315
- interactive=True
316
- )
317
-
318
- mix.change(fn=mix_fn,
319
- inputs=[mix, input_mix_adpe, input_mix_pe, input_mix_gwp],
320
- outputs=[input_mix_adpe, input_mix_pe, input_mix_gwp])
321
- input_mix_gwp.input(fn=custom, outputs=mix)
322
- input_mix_adpe.input(fn=custom, outputs=mix)
323
- input_mix_pe.input(fn=custom, outputs=mix)
324
-
325
-
326
- @gr.render(inputs=[
327
- input_model_active_params,
328
- input_model_total_params,
329
- input_tokens,
330
- input_mix_gwp,
331
- input_mix_adpe,
332
- input_mix_pe
333
- ])
334
- def render_expert(
335
- model_active_params,
336
- model_total_params,
337
- tokens,
338
- mix_gwp,
339
- mix_adpe,
340
- mix_pe
341
- ):
342
- impacts = compute_llm_impacts_expert(
343
- model_active_parameter_count=model_active_params,
344
- model_total_parameter_count=model_total_params,
345
- output_token_count=tokens,
346
- request_latency=100000,
347
- if_electricity_mix_gwp=mix_gwp,
348
- if_electricity_mix_adpe=mix_adpe,
349
- if_electricity_mix_pe=mix_pe
350
- )
351
- impacts = format_impacts(impacts)
352
-
353
- with gr.Blocks():
354
- with gr.Row():
355
- gr.Markdown("## Environmental impacts")
356
- with gr.Row():
357
- with gr.Column(scale=1, min_width=220):
358
- gr.Markdown(f"""
359
- <h2 align="center">⚡️ Energy</h2>
360
- $$ \Large {impacts.energy.magnitude:.3g} \ \large {impacts.energy.units} $$
361
- <p align="center"><i>Evaluates the electricity consumption<i></p><br>
362
- """)
363
- with gr.Column(scale=1, min_width=220):
364
- gr.Markdown(f"""
365
- <h2 align="center">🌍️ GHG Emissions</h2>
366
- $$ \Large {impacts.gwp.magnitude:.3g} \ \large {impacts.gwp.units} $$
367
- <p align="center"><i>Evaluates the effect on global warming<i></p><br>
368
- """)
369
- with gr.Column(scale=1, min_width=220):
370
- gr.Markdown(f"""
371
- <h2 align="center">🪨 Abiotic Resources</h2>
372
- $$ \Large {impacts.adpe.magnitude:.3g} \ \large {impacts.adpe.units} $$
373
- <p align="center"><i>Evaluates the use of metals and minerals<i></p><br>
374
- """)
375
- with gr.Column(scale=1, min_width=220):
376
- gr.Markdown(f"""
377
- <h2 align="center">⛽️ Primary Energy</h2>
378
- $$ \Large {impacts.pe.magnitude:.3g} \ \large {impacts.pe.units} $$
379
- <p align="center"><i>Evaluates the use of energy resources<i></p><br>
380
- """)
381
-
382
- with gr.Tab("📖 Methodology"):
383
- gr.Markdown(METHODOLOGY_TEXT,
384
- elem_classes="descriptive-text",
385
- latex_delimiters=[
386
- {"left": "$$", "right": "$$", "display": True},
387
- {"left": "$", "right": "$", "display": False}
388
- ])
389
-
390
- with gr.Tab("ℹ️ About"):
391
- gr.Markdown(ABOUT_TEXT, elem_classes="descriptive-text",)
392
-
393
- with gr.Accordion("📚 Citation", open=False):
394
- gr.Textbox(
395
- value=CITATION_TEXT,
396
- label=CITATION_LABEL,
397
- interactive=False,
398
- show_copy_button=True,
399
- lines=len(CITATION_TEXT.split('\n')),
400
- )
401
-
402
- # License
403
- gr.Markdown(LICENCE_TEXT)
404
-
405
- if __name__ == '__main__':
406
- demo.launch()
1
+ import gradio as gr
2
+
3
+ import requests
4
+ from bs4 import BeautifulSoup
5
+
6
+ import tiktoken
7
+
8
+ import matplotlib
9
+ import matplotlib.pyplot as plt
10
+
11
+ from ecologits.tracers.utils import compute_llm_impacts, _avg
12
+ from ecologits.impacts.llm import compute_llm_impacts as compute_llm_impacts_expert
13
+ from ecologits.impacts.llm import IF_ELECTRICITY_MIX_GWP, IF_ELECTRICITY_MIX_ADPE, IF_ELECTRICITY_MIX_PE
14
+ from ecologits.model_repository import models
15
+
16
+ from src.assets import custom_css
17
+ from src.electricity_mix import COUNTRY_CODES, find_electricity_mix
18
+ from src.content import (
19
+ HERO_TEXT,
20
+ ABOUT_TEXT,
21
+ CITATION_LABEL,
22
+ CITATION_TEXT,
23
+ LICENCE_TEXT, METHODOLOGY_TEXT
24
+ )
25
+ from src.constants import (
26
+ PROVIDERS,
27
+ OPENAI_MODELS,
28
+ ANTHROPIC_MODELS,
29
+ COHERE_MODELS,
30
+ META_MODELS,
31
+ MISTRALAI_MODELS,
32
+ PROMPTS,
33
+ CLOSED_SOURCE_MODELS,
34
+ MODELS,
35
+ )
36
+ from src.utils import (
37
+ format_impacts,
38
+ format_energy_eq_physical_activity,
39
+ PhysicalActivity,
40
+ format_energy_eq_electric_vehicle,
41
+ format_gwp_eq_streaming, format_energy_eq_electricity_production, EnergyProduction,
42
+ format_gwp_eq_airplane_paris_nyc, format_energy_eq_electricity_consumption_ireland
43
+ )
44
+
45
+ CUSTOM = "Custom"
46
+
47
+ tokenizer = tiktoken.get_encoding('cl100k_base')
48
+
49
+ def model_list(provider: str) -> gr.Dropdown:
50
+ if provider == "openai":
51
+ return gr.Dropdown(
52
+ OPENAI_MODELS,
53
+ label="Model",
54
+ value=OPENAI_MODELS[0][1],
55
+ filterable=True,
56
+ )
57
+ elif provider == "anthropic":
58
+ return gr.Dropdown(
59
+ ANTHROPIC_MODELS,
60
+ label="Model",
61
+ value=ANTHROPIC_MODELS[0][1],
62
+ filterable=True,
63
+ )
64
+ elif provider == "cohere":
65
+ return gr.Dropdown(
66
+ COHERE_MODELS,
67
+ label="Model",
68
+ value=COHERE_MODELS[0][1],
69
+ filterable=True,
70
+ )
71
+ elif provider == "huggingface_hub/meta":
72
+ return gr.Dropdown(
73
+ META_MODELS,
74
+ label="Model",
75
+ value=META_MODELS[0][1],
76
+ filterable=True,
77
+ )
78
+ elif provider == "mistralai":
79
+ return gr.Dropdown(
80
+ MISTRALAI_MODELS,
81
+ label="Model",
82
+ value=MISTRALAI_MODELS[0][1],
83
+ filterable=True,
84
+ )
85
+
86
+
87
+ def custom():
88
+ return CUSTOM
89
+
90
+ def tiktoken_len(text):
91
+ tokens = tokenizer.encode(
92
+ text,
93
+ disallowed_special=()
94
+ )
95
+ return len(tokens)
96
+
97
+ def model_active_params_fn(model_name: str, n_param: float):
98
+ if model_name == CUSTOM:
99
+ return n_param
100
+ provider, model_name = model_name.split('/', 1)
101
+ model = models.find_model(provider=provider, model_name=model_name)
102
+ return model.active_parameters or _avg(model.active_parameters_range)
103
+
104
+
105
+ def model_total_params_fn(model_name: str, n_param: float):
106
+ if model_name == CUSTOM:
107
+ return n_param
108
+ provider, model_name = model_name.split('/', 1)
109
+ model = models.find_model(provider=provider, model_name=model_name)
110
+ return model.total_parameters or _avg(model.total_parameters_range)
111
+
112
+
113
+ def mix_fn(country_code: str, mix_adpe: float, mix_pe: float, mix_gwp: float):
114
+ if country_code == CUSTOM:
115
+ return mix_adpe, mix_pe, mix_gwp
116
+ return find_electricity_mix(country_code)
117
+
118
+ with gr.Blocks(css=custom_css) as demo:
119
+ gr.Markdown(HERO_TEXT)
120
+
121
+ with gr.Tab("🧮 Calculator"):
122
+ with gr.Row():
123
+ gr.Markdown("# Estimate the environmental impacts of LLM inference")
124
+ with gr.Row():
125
+ input_provider = gr.Dropdown(
126
+ PROVIDERS,
127
+ label="Provider",
128
+ value=PROVIDERS[0][1],
129
+ filterable=True,
130
+ )
131
+
132
+ input_model = gr.Dropdown(
133
+ OPENAI_MODELS,
134
+ label="Model",
135
+ value=OPENAI_MODELS[0][1],
136
+ filterable=True,
137
+ )
138
+ input_provider.change(model_list, input_provider, input_model)
139
+
140
+ input_prompt = gr.Dropdown(
141
+ PROMPTS,
142
+ label="Example prompt",
143
+ value=400,
144
+ )
145
+
146
+
147
+ @gr.render(inputs=[input_provider, input_model, input_prompt])
148
+ def render_simple(provider, model, prompt):
149
+ if provider.startswith("huggingface_hub"):
150
+ provider = provider.split("/")[0]
151
+ if models.find_model(provider, model) is not None:
152
+ impacts = compute_llm_impacts(
153
+ provider=provider,
154
+ model_name=model,
155
+ output_token_count=prompt,
156
+ request_latency=100000
157
+ )
158
+ impacts = format_impacts(impacts)
159
+
160
+ # Inference impacts
161
+ with gr.Blocks():
162
+ if f"{provider}/{model}" in CLOSED_SOURCE_MODELS:
163
+ with gr.Row():
164
+ gr.Markdown("""<p> ⚠️ You have selected a closed-source model. Please be aware that
165
+ some providers do not fully disclose information about such models. Consequently, our
166
+ estimates have a lower precision for closed-source models. For further details, refer to
167
+ our FAQ in the About section.
168
+ </p>""", elem_classes="warning-box")
169
+
170
+ with gr.Row():
171
+ gr.Markdown("""
172
+ ## Environmental impacts
173
+
174
+ To understand how the environmental impacts are computed, go to the 📖 Methodology tab.
175
+ """)
176
+ with gr.Row():
177
+ with gr.Column(scale=1, min_width=220):
178
+ gr.Markdown(f"""
179
+ <h2 align="center">⚡️ Energy</h2>
180
+ $$ \Large {impacts.energy.magnitude:.3g} \ \large {impacts.energy.units} $$
181
+ <p align="center"><i>Evaluates the electricity consumption<i></p><br>
182
+ """)
183
+ with gr.Column(scale=1, min_width=220):
184
+ gr.Markdown(f"""
185
+ <h2 align="center">🌍️ GHG Emissions</h2>
186
+ $$ \Large {impacts.gwp.magnitude:.3g} \ \large {impacts.gwp.units} $$
187
+ <p align="center"><i>Evaluates the effect on global warming<i></p><br>
188
+ """)
189
+ with gr.Column(scale=1, min_width=220):
190
+ gr.Markdown(f"""
191
+ <h2 align="center">🪨 Abiotic Resources</h2>
192
+ $$ \Large {impacts.adpe.magnitude:.3g} \ \large {impacts.adpe.units} $$
193
+ <p align="center"><i>Evaluates the use of metals and minerals<i></p><br>
194
+ """)
195
+ with gr.Column(scale=1, min_width=220):
196
+ gr.Markdown(f"""
197
+ <h2 align="center">⛽️ Primary Energy</h2>
198
+ $$ \Large {impacts.pe.magnitude:.3g} \ \large {impacts.pe.units} $$
199
+ <p align="center"><i>Evaluates the use of energy resources<i></p><br>
200
+ """)
201
+
202
+ # Impacts equivalents
203
+ with gr.Blocks():
204
+ with gr.Row():
205
+ gr.Markdown("""
206
+ ---
207
+ ## That's equivalent to...
208
+
209
+ Making this request to the LLM is equivalent to the following actions.
210
+ """)
211
+ with gr.Row():
212
+ physical_activity, distance = format_energy_eq_physical_activity(impacts.energy)
213
+ if physical_activity == PhysicalActivity.WALKING:
214
+ physical_activity = "🚶 " + physical_activity.capitalize()
215
+ if physical_activity == PhysicalActivity.RUNNING:
216
+ physical_activity = "🏃 " + physical_activity.capitalize()
217
+ with gr.Column(scale=1, min_width=300):
218
+ gr.Markdown(f"""
219
+ <h2 align="center">{physical_activity} $$ \Large {distance.magnitude:.3g}\ {distance.units} $$ </h2>
220
+ <p align="center"><i>Based on energy consumption<i></p><br>
221
+ """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
222
+
223
+ ev_eq = format_energy_eq_electric_vehicle(impacts.energy)
224
+ with gr.Column(scale=1, min_width=300):
225
+ gr.Markdown(f"""
226
+ <h2 align="center">🔋 Electric Vehicle $$ \Large {ev_eq.magnitude:.3g}\ {ev_eq.units} $$ </h2>
227
+ <p align="center"><i>Based on energy consumption<i></p><br>
228
+ """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
229
+
230
+ streaming_eq = format_gwp_eq_streaming(impacts.gwp)
231
+ with gr.Column(scale=1, min_width=300):
232
+ gr.Markdown(f"""
233
+ <h2 align="center">⏯️ Streaming $$ \Large {streaming_eq.magnitude:.3g}\ {streaming_eq.units} $$ </h2>
234
+ <p align="center"><i>Based on GHG emissions<i></p><br>
235
+ """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
236
+
237
+ # Bigger scale impacts equivalent
238
+ with gr.Blocks():
239
+ with gr.Row():
240
+ gr.Markdown("""
241
+ ## What if 1% of the planet made this request every day for 1 year?
242
+
243
+ If this use case were largely deployed around the world, the equivalent impacts would be as follows. (The
244
+ impacts of this request x 1% of 8 billion people x 365 days in a year.)
245
+ """)
246
+ with gr.Row():
247
+ electricity_production, count = format_energy_eq_electricity_production(impacts.energy)
248
+ if electricity_production == EnergyProduction.NUCLEAR:
249
+ emoji = "☢️"
250
+ name = "Nuclear power plants"
251
+ if electricity_production == EnergyProduction.WIND:
252
+ emoji = "💨️ "
253
+ name = "Wind turbines"
254
+ with gr.Column(scale=1, min_width=300):
255
+ gr.Markdown(f"""
256
+ <h2 align="center">{emoji} $$ \Large {count.magnitude:.0f} $$ {name} <span style="font-size: 12px">(yearly)</span></h2>
257
+ <p align="center"><i>Based on electricity consumption<i></p><br>
258
+ """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
259
+
260
+ ireland_count = format_energy_eq_electricity_consumption_ireland(impacts.energy)
261
+ with gr.Column(scale=1, min_width=300):
262
+ gr.Markdown(f"""
263
+ <h2 align="center">🇮🇪 $$ \Large {ireland_count.magnitude:.2g} $$ x Ireland <span style="font-size: 12px">(yearly ⚡️ cons.)</span></h2>
264
+ <p align="center"><i>Based on electricity consumption<i></p><br>
265
+ """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
266
+
267
+ paris_nyc_airplane = format_gwp_eq_airplane_paris_nyc(impacts.gwp)
268
+ with gr.Column(scale=1, min_width=300):
269
+ gr.Markdown(f"""
270
+ <h2 align="center">✈️ $$ \Large {paris_nyc_airplane.magnitude:,.0f} $$ Paris ↔ NYC </h2>
271
+ <p align="center"><i>Based on GHG emissions<i></p><br>
272
+ """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
273
+
274
+ with gr.Tab("🤓 Expert Mode"):
275
+
276
+ with gr.Row():
277
+ gr.Markdown("# 🤓 Expert mode")
278
+
279
+ model = gr.Dropdown(
280
+ MODELS + [CUSTOM],
281
+ label="Model name",
282
+ value="openai/gpt-3.5-turbo",
283
+ filterable=True,
284
+ interactive=True
285
+ )
286
+ input_model_active_params = gr.Number(
287
+ label="Number of billions of active parameters",
288
+ value=45.0,
289
+ interactive=True
290
+ )
291
+ input_model_total_params = gr.Number(
292
+ label="Number of billions of total parameters",
293
+ value=45.0,
294
+ interactive=True
295
+ )
296
+
297
+ model.change(fn=model_active_params_fn,
298
+ inputs=[model, input_model_active_params],
299
+ outputs=[input_model_active_params])
300
+ model.change(fn=model_total_params_fn,
301
+ inputs=[model, input_model_total_params],
302
+ outputs=[input_model_total_params])
303
+ input_model_active_params.input(fn=custom, outputs=[model])
304
+ input_model_total_params.input(fn=custom, outputs=[model])
305
+
306
+ input_tokens = gr.Number(
307
+ label="Output tokens",
308
+ value=100
309
+ )
310
+
311
+ mix = gr.Dropdown(
312
+ COUNTRY_CODES + [CUSTOM],
313
+ label="Location",
314
+ value="WOR",
315
+ filterable=True,
316
+ interactive=True
317
+ )
318
+ input_mix_gwp = gr.Number(
319
+ label="Electricity mix - GHG emissions [kgCO2eq / kWh]",
320
+ value=IF_ELECTRICITY_MIX_GWP,
321
+ interactive=True
322
+ )
323
+ input_mix_adpe = gr.Number(
324
+ label="Electricity mix - Abiotic resources [kgSbeq / kWh]",
325
+ value=IF_ELECTRICITY_MIX_ADPE,
326
+ interactive=True
327
+ )
328
+ input_mix_pe = gr.Number(
329
+ label="Electricity mix - Primary energy [MJ / kWh]",
330
+ value=IF_ELECTRICITY_MIX_PE,
331
+ interactive=True
332
+ )
333
+
334
+ mix.change(fn=mix_fn,
335
+ inputs=[mix, input_mix_adpe, input_mix_pe, input_mix_gwp],
336
+ outputs=[input_mix_adpe, input_mix_pe, input_mix_gwp])
337
+ input_mix_gwp.input(fn=custom, outputs=mix)
338
+ input_mix_adpe.input(fn=custom, outputs=mix)
339
+ input_mix_pe.input(fn=custom, outputs=mix)
340
+
341
+
342
+ @gr.render(inputs=[
343
+ input_model_active_params,
344
+ input_model_total_params,
345
+ input_tokens,
346
+ input_mix_gwp,
347
+ input_mix_adpe,
348
+ input_mix_pe
349
+ ])
350
+ def render_expert(
351
+ model_active_params,
352
+ model_total_params,
353
+ tokens,
354
+ mix_gwp,
355
+ mix_adpe,
356
+ mix_pe
357
+ ):
358
+ impacts = compute_llm_impacts_expert(
359
+ model_active_parameter_count=model_active_params,
360
+ model_total_parameter_count=model_total_params,
361
+ output_token_count=tokens,
362
+ request_latency=100000,
363
+ if_electricity_mix_gwp=mix_gwp,
364
+ if_electricity_mix_adpe=mix_adpe,
365
+ if_electricity_mix_pe=mix_pe
366
+ )
367
+ impacts = format_impacts(impacts)
368
+
369
+ with gr.Blocks():
370
+
371
+ with gr.Row():
372
+ gr.Markdown(f"""
373
+ <h2 align="center">Environmental impacts</h2>
374
+ """)
375
+
376
+ with gr.Row():
377
+ with gr.Column(scale=1, min_width=220):
378
+ gr.Markdown(f"""
379
+ <h2 align="center">⚡️ Energy</h2>
380
+ $$ \Large {impacts.energy.magnitude:.3g} \ \large {impacts.energy.units} $$
381
+ <p align="center"><i>Evaluates the electricity consumption<i></p><br>
382
+ """)
383
+ with gr.Column(scale=1, min_width=220):
384
+ gr.Markdown(f"""
385
+ <h2 align="center">🌍️ GHG Emissions</h2>
386
+ $$ \Large {impacts.gwp.magnitude:.3g} \ \large {impacts.gwp.units} $$
387
+ <p align="center"><i>Evaluates the effect on global warming<i></p><br>
388
+ """)
389
+ with gr.Column(scale=1, min_width=220):
390
+ gr.Markdown(f"""
391
+ <h2 align="center">🪨 Abiotic Resources</h2>
392
+ $$ \Large {impacts.adpe.magnitude:.3g} \ \large {impacts.adpe.units} $$
393
+ <p align="center"><i>Evaluates the use of metals and minerals<i></p><br>
394
+ """)
395
+ with gr.Column(scale=1, min_width=220):
396
+ gr.Markdown(f"""
397
+ <h2 align="center">⛽️ Primary Energy</h2>
398
+ $$ \Large {impacts.pe.magnitude:.3g} \ \large {impacts.pe.units} $$
399
+ <p align="center"><i>Evaluates the use of energy resources<i></p><br>
400
+ """)
401
+
402
+ with gr.Blocks():
403
+ with gr.Row():
404
+ gr.Markdown(f"""
405
+ <h2 align="center">How can location impact the footprint?</h2>
406
+ """)
407
+ with gr.Row():
408
+ def create_static_bar_plot():
409
+ categories = ['Sweden', 'France', 'Canada', 'USA', 'China', 'Australia', 'India']
410
+ values = [46, 81, 238, 679, 1057, 1123, 1583]
411
+
412
+ def addlabels(x,y):
413
+ for i in range(len(x)):
414
+ plt.text(i, y[i], y[i], ha = 'center')
415
+
416
+ fig, ax = plt.subplots(figsize=(15,5), facecolor='#1F2937')
417
+ ax.bar(categories, values)
418
+ #ax.set_xlabel('Countries')
419
+ ax.set_ylabel('GHG emissions (gCO2eq) per kWh')
420
+ ax.set_title('GWP emissions for 1 kWh of electricity consumption')
421
+ ax.set_facecolor("#0B0F19")
422
+
423
+ addlabels(categories, values)
424
+
425
+ font = {'family' : 'monospace',
426
+ 'weight' : 'normal',
427
+ 'size' : 14}
428
+
429
+ matplotlib.rc('font', **font)
430
+ matplotlib.rcParams.update({'text.color':'white',
431
+ 'axes.labelcolor':'white',
432
+ 'xtick.color':'white',
433
+ 'ytick.color':'white'})
434
+
435
+ return fig
436
+
437
+ static_plot = gr.Plot(value=create_static_bar_plot())
438
+
439
+ with gr.Tab("🔍 Evaluate your own usage"):
440
+
441
+ with gr.Row():
442
+ gr.Markdown("""
443
+ # 🔍 Evaluate your own usage
444
+ ⚠️ For now, only ChatGPT conversation import is available.
445
+ You can still try other models; however, results might be inaccurate due to fixed parameters, such as the tokenization method.
446
+ """)
447
+
448
+ def process_input(text):
449
+
450
+ r = requests.get(text, verify=False)
451
+
452
+ soup = BeautifulSoup(r.text, "html.parser")
453
+ list_text = str(soup).split('parts":["')
454
+ s = ''
455
+ for index, item in enumerate(list_text[1:int(len(list_text)/2)], start=1):  # enumerate avoids list.index(), which misbehaves on duplicate chunks
456
+ if index % 2 == 1:
457
+ s = s + item.split('"]')[0]
458
+
459
+ amount_token = tiktoken_len(s)
460
+
461
+ return amount_token
462
+
463
+ def compute_own_impacts(amount_token, model):
464
+ provider = model.split('/')[0].lower()
465
+ model = model.split('/')[1]
466
+ impacts = compute_llm_impacts(
467
+ provider=provider,
468
+ model_name=model,
469
+ output_token_count=amount_token,
470
+ request_latency=100000
471
+ )
472
+
473
+ impacts = format_impacts(impacts)
474
+
475
+ energy = f"""
476
+ <h2 align="center">⚡️ Energy</h2>
477
+ $$ \Large {impacts.energy.magnitude:.3g} \ \large {impacts.energy.units} $$
478
+ <p align="center"><i>Evaluates the electricity consumption<i></p><br>
479
+ """
480
+
481
+ gwp = f"""
482
+ <h2 align="center">🌍️ GHG Emissions</h2>
483
+ $$ \Large {impacts.gwp.magnitude:.3g} \ \large {impacts.gwp.units} $$
484
+ <p align="center"><i>Evaluates the effect on global warming<i></p><br>
485
+ """
486
+
487
+ adp = f"""
488
+ <h2 align="center">🪨 Abiotic Resources</h2>
489
+ $$ \Large {impacts.adpe.magnitude:.3g} \ \large {impacts.adpe.units} $$
490
+ <p align="center"><i>Evaluates the use of metals and minerals<i></p><br>
491
+ """
492
+
493
+ pe = f"""
494
+ <h2 align="center">⛽️ Primary Energy</h2>
495
+ $$ \Large {impacts.pe.magnitude:.3g} \ \large {impacts.pe.units} $$
496
+ <p align="center"><i>Evaluates the use of energy resources<i></p><br>
497
+ """
498
+
499
+ return energy, gwp, adp, pe
500
+
501
+ def combined_function(text, model):
502
+ n_token = process_input(text)
503
+ energy, gwp, adp, pe = compute_own_impacts(n_token, model)
504
+ return n_token, energy, gwp, adp, pe
505
+
506
+ with gr.Blocks():
507
+
508
+ text_input = gr.Textbox(label="Paste the URL here (must be in the https://chatgpt.com/share/xxxx format)")
509
+ model = gr.Dropdown(
510
+ MODELS,
511
+ label="Model name",
512
+ value="openai/gpt-4o",
513
+ filterable=True,
514
+ interactive=True
515
+ )
516
+
517
+ process_button = gr.Button("Estimate this usage footprint")
518
+
519
+ with gr.Accordion("ℹ️ Info", open=False):
520
+ n_token = gr.Textbox(label="Total number of tokens:")
521
+
522
+ with gr.Row():
523
+ with gr.Column(scale=1, min_width=220):
524
+ energy = gr.Markdown()
525
+ with gr.Column(scale=1, min_width=220):
526
+ gwp = gr.Markdown()
527
+ with gr.Column(scale=1, min_width=220):
528
+ adp = gr.Markdown()
529
+ with gr.Column(scale=1, min_width=220):
530
+ pe = gr.Markdown()
531
+
532
+ process_button.click(
533
+ fn=combined_function,
534
+ inputs=[text_input, model],
535
+ outputs=[n_token, energy, gwp, adp, pe]
536
+ )
537
+
538
+ with gr.Tab("📖 Methodology"):
539
+ gr.Markdown(METHODOLOGY_TEXT,
540
+ elem_classes="descriptive-text",
541
+ latex_delimiters=[
542
+ {"left": "$$", "right": "$$", "display": True},
543
+ {"left": "$", "right": "$", "display": False}
544
+ ])
545
+
546
+ with gr.Tab("ℹ️ About"):
547
+ gr.Markdown(ABOUT_TEXT, elem_classes="descriptive-text",)
548
+
549
+ with gr.Accordion("📚 Citation", open=False):
550
+ gr.Textbox(
551
+ value=CITATION_TEXT,
552
+ label=CITATION_LABEL,
553
+ interactive=False,
554
+ show_copy_button=True,
555
+ lines=len(CITATION_TEXT.split('\n')),
556
+ )
557
+
558
+ # License
559
+ gr.Markdown(LICENCE_TEXT)
560
+
561
+ if __name__ == '__main__':
562
+ demo.launch()
requirements-dev.txt CHANGED
@@ -1,3 +1,6 @@
1
- gradio
2
- ecologits==0.1.7
3
- pint
1
+ gradio
2
+ ecologits==0.1.7
3
+ pint
4
+ beautifulsoup4
5
+ requests
6
+ tiktoken
requirements.txt CHANGED
@@ -1,2 +1,5 @@
1
- ecologits==0.1.7
2
- pint
1
+ ecologits==0.1.7
2
+ pint
3
+ beautifulsoup4
4
+ requests
5
+ tiktoken