import base64
import re

import gradio as gr
import requests
from bs4 import BeautifulSoup

def get_blocked_urls():
  """
    Get a list of blocked URLs.

    Returns:
      list: A list of blocked URLs.

    Raises:
      None.
  """
  url = 'https://colab.research.google.com/'
  r = requests.get(url)
  if r.status_code == 200:
    result = []
    soup = BeautifulSoup(r.text, 'html.parser')
    # find the script tag that loads "external_polymer_binary"
    for script in soup.find_all('script'):
      if "external_polymer_binary" in str(script):
        r_js = requests.get(script['src'])

        # the blocked-URL list is embedded as a single-quoted, ';'-separated
        # string that contains "webui"
        pattern = r"'(.*?)webui(.*?)'"
        match = re.search(pattern, r_js.text)
        raw_string = match.group(0)

        # trim the surrounding quotes, then split on ';' into a list
        result = raw_string[1:-1].split(';')
        for i in range(len(result)):
          decodedurl = result[i]
          repeats = 0
          try:
            # some entries are base64-encoded, sometimes several layers deep;
            # the long '=' tail compensates for stripped padding so b64decode
            # accepts them (this took 2 hours to figure out)
            for _ in range(10):
              decodedurl = base64.b64decode(f"{decodedurl}========================================================").decode('utf-8')
              repeats += 1
          except Exception:
            pass
          if decodedurl != result[i]:
            # keep the raw entry, append the decoded form and a marker
            # that handle_refresh uses for styling
            result[i] = f"{result[i]} < {decodedurl} x{repeats}>[thisisb64]"

    if result:
      return result
    else:
      return ["failed :<"]

  else:
    return [f"res code: {r.status_code}"]
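
# Side note (illustrative, not used by the app): the oversized '=' tail in the
# decode loop above works because base64.b64decode, in its default non-strict
# mode, tolerates surplus padding, so appending a long run of '=' repairs
# entries whose padding was stripped without computing how many characters are
# actually missing. The value below is a made-up URL, not data from the Colab
# bundle:
#
#   >>> import base64
#   >>> base64.b64decode("aHR0cHM6Ly9leGFtcGxlLmNvbQ" + "=" * 8)
#   b'https://example.com'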


def handle_refresh():
  """
  Generates an HTML ordered list of blocked URLs.

  Returns:
    str: The HTML string containing the ordered list of blocked URLs.
  """
  xs = "<ol>"
  for url in get_blocked_urls():
    if "[thisisb64]" in url:
      url = url.replace("[thisisb64]", "")
      nondecoded = url.split('<')[0]
      decodedurl = url.split('<')[1]
      decodedurl = f"&lt;{decodedurl.replace('>', '&gt;')}"
      xs += '<li><code>'+nondecoded+'</code>' + '<p style="color: #4EACEF">'+decodedurl+'</p></li>'
    else:
      xs += "<li><code>"+url+"</code></li>"
  xs += "</ol>"
  return xs
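
# Shape of the markup handle_refresh returns (the URLs below are placeholders,
# not real entries from the Colab bundle):
#
#   <ol>
#     <li><code>some-blocked-url</code></li>
#     <li><code>c29tZS1lbmNvZGVkLXVybA== </code><p style="color: #4EACEF">&lt; some-encoded-url x1&gt;</p></li>
#   </ol>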

with gr.Blocks(
    analytics_enabled=False, title="GGL Checks", theme="NoCrypt/miku"
) as demo:
    gr.HTML("""<center><h1>GGL Checks</h1></center>""")
    refresh = gr.Button("Refresh", variant="primary")
    html = gr.HTML()
    refresh.click(handle_refresh, outputs=[html])


demo.launch(debug=True)