John Lehmann j1o1h1n

  • Sydney, Australia
@j1o1h1n
j1o1h1n / dict.py
Created August 27, 2024 13:22
This is a simple text UI for a dictionary. Using it requires a SQLite database containing the dictionary words and definitions (not provided).
import time
import sqlite3
from dataclasses import dataclass
from functools import partial
from textual import on
from textual.app import App, ComposeResult
from textual.command import Hit, Hits, Provider
from textual.widgets import Header, TextArea, Input, ContentSwitcher
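The preview stops at the imports. As a minimal sketch of the kind of SQLite lookup the app presumably performs, assuming a table named entries with word and definition columns (the database schema is not provided):

# Sketch only, not from the gist: the table and column names are assumptions.
def lookup(conn: sqlite3.Connection, word: str) -> str | None:
    row = conn.execute(
        "SELECT definition FROM entries WHERE word = ?", (word,)
    ).fetchone()
    return row[0] if row else None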
@j1o1h1n
j1o1h1n / visitfiles.py
Created March 29, 2023 01:01
visit files using os.walk and pathlib
import pathlib
import os
def visitfiles(top, visit, topdown=True, onerror=None, followlinks=False):
    """Walk the tree below top and call visit(path) on every file as a pathlib.Path."""
    for root, dirs, files in os.walk(top, topdown, onerror, followlinks):
        for file in files:
            p = pathlib.Path(root) / file
            visit(p)
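A usage sketch; the suffix filter is purely illustrative:

# Illustrative use: print every .py file under the current directory.
def print_python_file(path):
    if path.suffix == ".py":
        print(path)

visitfiles(".", print_python_file)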
@j1o1h1n
j1o1h1n / words.css
Created August 31, 2022 10:00
A dictionary TUI written in Python using the Textual library - see https://asciinema.org/a/518081
Button {
    padding-left: 1;
    padding-right: 1;
}

TextInput {
    layout: horizontal;
    height: 3;
    background: $panel-darken-1;
    border: tall $panel-darken-2;
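In current Textual releases a stylesheet like this is attached through the app's CSS_PATH attribute; a minimal sketch, assuming the file is saved as words.css next to the app module:

# Sketch only: point a Textual app at the stylesheet above.
from textual.app import App

class WordsApp(App):
    CSS_PATH = "words.css"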
@j1o1h1n
j1o1h1n / console.py
Last active March 3, 2022 19:44
This is a demo of an interactive console in a Textual user interface.
from __future__ import annotations
import string
import code
import sys
import io
from typing import Callable
from textual.app import App
from textual.widgets import Header, ScrollView
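The preview again stops at the imports. A minimal sketch of how console output might be captured for display in a widget, assuming the gist builds on code.InteractiveConsole; the class and method names here are illustrative, not the gist's own:

# Sketch only (assumed, not the gist's code): feed a line to an
# InteractiveConsole while capturing anything written to stdout.
class CapturingConsole(code.InteractiveConsole):
    def run_line(self, source: str) -> str:
        buffer = io.StringIO()
        original_stdout, sys.stdout = sys.stdout, buffer
        try:
            self.push(source)
        finally:
            sys.stdout = original_stdout
        return buffer.getvalue()
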
from nltk.tokenize import TreebankWordTokenizer

# Added so this fragment runs on its own; assumes NLTK's Treebank word tokenizer.
_treebank_word_tokenizer = TreebankWordTokenizer()

def sent_tokenize(text, tokenizer):
    return tokenizer.tokenize(text)

def word_tokenize(text, tokenizer, preserve_line=False):
    sentences = [text] if preserve_line else sent_tokenize(text, tokenizer)
    return [
        token for sent in sentences for token in _treebank_word_tokenizer.tokenize(sent)
    ]
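An illustrative call; passing a Punkt sentence tokenizer is an assumption about what the tokenizer argument expects:

# Illustrative call; PunktSentenceTokenizer is an assumed fit for `tokenizer`.
from nltk.tokenize import PunktSentenceTokenizer

tokens = word_tokenize("The cat sat. The dog barked.", PunktSentenceTokenizer())
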
import pandas as pd
import numpy as np
import sklearn
from nltk import corpus
from wordcloud import WordCloud
from nltk.corpus import twitter_samples

positive_tweets = twitter_samples.strings('positive_tweets.json')
negative_tweets = twitter_samples.strings('negative_tweets.json')
text = twitter_samples.strings('tweets.20150430-223406.json')
tweet_tokens = twitter_samples.tokenized('positive_tweets.json')
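Given the WordCloud import, a plausible next step, assumed rather than shown in the snippet, is to build a word cloud from the positive tweets:

# Assumed continuation, not part of the original snippet.
wc = WordCloud(width=800, height=400).generate(" ".join(positive_tweets))
wc.to_file("positive_tweets_wordcloud.png")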