path: root/bjoern/videoanalyse/LogParser.py
# -*- coding: utf-8 -*-
"""
This script takes multiple .log data files from PCScreenWatcher and a Firefox history database (places.sqlite)
inside a folder and extracts each activity title, its URL (if applicable), and its usage timestamps into a pseudo-CSV file.
"""

import os
import re
from datetime import datetime
import csv
import codecs
from read_sqlite import get_url_from_sqlite
from pathlib import Path

BROWSER_TITLE_SUFFIX = " - Mozilla Firefox"


# takes a list of log-data strings and returns a dict mapping each browser window
# title to a list of (enter, exit) datetime.time tuples
def extract_activities(log_data):
    # regex which matches text between square brackets
    reg_titles = re.compile(r"(?<=\[).*?(?=\])")
    # regex for the total/active time line (compiled but not used below)
    reg_time = re.compile(
        r"(?<!\()Total: \d*\.*\d*m*\d*\.*\d*s*, Active: \d*\.*\d*m*\d*\.*\d*s*"
    )
    # regex which matches "<number>. " and splits the log into per-window entries
    reg_split = re.compile(r"\d+\. ")

    extracted_data = {}
    for log_date in log_data:
        windows = reg_split.split(log_date)
        brackets = []

        del_flag = True
        # extract squared brackets per string
        for s in windows:
            if not del_flag:
                found_brackets = reg_titles.findall(s)
                if found_brackets:
                    # Only use Browser titles
                    if found_brackets[0].endswith(BROWSER_TITLE_SUFFIX):
                        title = found_brackets[0].replace(BROWSER_TITLE_SUFFIX, "")
                        brackets.append(title)
                        enter_exit = s.split("Enter-Exit: ")[-1]
                        timestamps = reg_titles.findall(enter_exit)
                        for timestamp in timestamps:
                            t_enter, t_exit = timestamp.split("-")
                            if title not in extracted_data:
                                extracted_data[title] = []
                            enter_date = datetime.strptime(t_enter, "%H:%M:%S").time()
                            exit_date = datetime.strptime(t_exit, "%H:%M:%S").time()
                            extracted_data[title].append((enter_date, exit_date))
            if "Activies in each window" in s:
                del_flag = False
            if "Restricted Windows Summary:" in s:
                del_flag = True

    return extracted_data
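

# Illustrative (assumed) shape of a log fragment that the regexes above target; the
# exact PCScreenWatcher wording and layout may differ:
#
#   Activies in each window:
#   1. [Example Page - Mozilla Firefox]
#      Total: 1m30.0s, Active: 0m45.0s, Enter-Exit: [10:15:00-10:16:30]
#   Restricted Windows Summary: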


# reads every .log file in data_path and returns the contents of each file as one string
def get_log_data(data_path):
    files = os.listdir(data_path)
    log_files = []
    log_data = []
    for s in files:
        if s.endswith(".log"):
            log_files.append(os.path.join(data_path, s))
    for log_file in log_files:
        with codecs.open(log_file, "r", "utf-8") as reader:
            log_data.append(reader.read())
    return log_data


# returns the path of the first .sqlite file found in data_path (the Firefox
# places.sqlite history database), or None if there is none
def get_history_db(data_path):
    files = os.listdir(data_path)
    for s in files:
        if s.endswith(".sqlite"):
            history_db = os.path.join(data_path, s)
            return history_db
    return None


# looks up each log entry's title (entry[2]) in the Firefox history database and
# appends the matching URL to the entry
def match_urls(history_db, log):
    for entry in log:
        url = get_url_from_sqlite(history_db, entry[2])
        entry.append(url)
    return log


def generate_log(activities: dict):
    # Build a chronologically ordered list of [start, end, title] rows by repeatedly
    # taking the activity with the smallest start time.
    # Note: this empties the activities dict passed in.
    log = []
    while activities:
        first_title = list(activities.keys())[0]
        smallest_start_time = (first_title, 0)
        for title in activities.keys():
            for idx, timestamp in enumerate(activities[title]):
                if (
                    timestamp[0]
                    < activities[smallest_start_time[0]][smallest_start_time[1]][0]
                ):
                    smallest_start_time = (title, idx)
        log.append(
            [
                activities[smallest_start_time[0]][smallest_start_time[1]][0].isoformat(),
                activities[smallest_start_time[0]][smallest_start_time[1]][1].isoformat(),
                smallest_start_time[0],
            ]
        )
        del activities[smallest_start_time[0]][smallest_start_time[1]]
        if not activities[smallest_start_time[0]]:
            del activities[smallest_start_time[0]]
    return log
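

# For reference only (a sketch, not used by the script): a simpler way to produce the
# same kind of rows is to flatten the dict and sort by start time; unlike
# generate_log, this does not mutate its argument.
def generate_log_sorted(activities: dict):
    rows = [
        (start, end, title)
        for title, spans in activities.items()
        for start, end in spans
    ]
    rows.sort(key=lambda row: row[0])
    return [[start.isoformat(), end.isoformat(), title] for start, end, title in rows]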


if __name__ == "__main__":
    # treat every sub-directory of the current working directory as one data folder
    for vp_dir in [f.name for f in os.scandir() if f.is_dir()]:
        print(vp_dir)
        activities = extract_activities(get_log_data(vp_dir))
        log = generate_log(activities)

        history = get_history_db(vp_dir)
        log = match_urls(history, log)

        # write one CSV per folder, named after the folder itself;
        # newline="" prevents the csv module from emitting blank lines on Windows
        path = Path(f"{vp_dir}/{vp_dir}.csv")
        with open(path, "w", newline="", encoding="utf-8") as csvfile:
            writer = csv.writer(csvfile, delimiter=",", quoting=csv.QUOTE_NONNUMERIC)
            writer.writerow(["Starttime", "Endtime", "Title", "URL"])
            for row in log:
                writer.writerow(row)

    input("*Press enter to close*")