This commit is contained in:
Price Hiller 2024-01-25 13:42:44 -06:00
parent 344a36aa31
commit 41e7525608
Signed by: Price
SSH Key Fingerprint: SHA256:Y4S9ZzYphRn1W1kbJerJFO6GGsfu9O70VaBSxJO7dF8
14 changed files with 742 additions and 40 deletions

View File

@ -0,0 +1,6 @@
#+FILETAGS: :college:cs1011:
** TODO Group Project
SCHEDULED: <2024-01-22 Mon> DEADLINE: <2024-02-02 Fri>
- Sign up for a Group under the =People= tab in Canvas
- Due two weeks from today

View File

@ -0,0 +1,20 @@
#+FILETAGS: :college:cs1011:
* Lecture 1 Introduction (What is Computer Science)
[2024-01-22]
** CS 1011 Topics
- What is Computer Science
- How Computers Work
- Data And Resource Management
- Networks and Internet
- Security
- Artificial Intelligence
- Diversity and Social Impact
- Might not get to this
** Group Project
- Sign up for a Group under the =People= tab in Canvas

Spring-2023/CS-2124/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
*/**/CMakeFiles
*/**/CMakeCache.txt
*/**/cmake_install.cmake
*.ignore

Binary file not shown.

View File

@ -0,0 +1,13 @@
cmake_minimum_required(VERSION 3.25)
project(dynamic-memory LANGUAGES "C")
file(GLOB_RECURSE dynamic-memory-sources CONFIGURE_DEPENDS "progs/dynamic-memory/src/*.c" "progs/dynamic-memory/src/*.h")
add_executable(dynamic-memory ${dynamic-memory-sources})
project(file-operations LANGUAGES "C")
file(GLOB_RECURSE file-operations-sources CONFIGURE_DEPENDS "progs/file-operations/src/*.c" "progs/file-operations/src/*.h")
add_executable(file-operations ${file-operations-sources})
project(string-manipulation LANGUAGES "C")
file(GLOB_RECURSE string-manipulation-sources CONFIGURE_DEPENDS "progs/string-manipulation/src/*.c" "progs/string-manipulation/src/*.h")
add_executable(string-manipulation ${string-manipulation-sources})

View File

@ -0,0 +1,265 @@
# CMAKE generated file: DO NOT EDIT!
# Generated by "Unix Makefiles" Generator, CMake Version 3.27
# Default target executed when no arguments are given to make.
default_target: all
.PHONY : default_target
# Allow only one "make -f Makefile2" at a time, but pass parallelism.
.NOTPARALLEL:
#=============================================================================
# Special targets provided by cmake.
# Disable implicit rules so canonical targets will work.
.SUFFIXES:
# Disable VCS-based implicit rules.
% : %,v
# Disable VCS-based implicit rules.
% : RCS/%
# Disable VCS-based implicit rules.
% : RCS/%,v
# Disable VCS-based implicit rules.
% : SCCS/s.%
# Disable VCS-based implicit rules.
% : s.%
.SUFFIXES: .hpux_make_needs_suffix_list
# Command-line flag to silence nested $(MAKE).
$(VERBOSE)MAKESILENT = -s
#Suppress display of executed commands.
$(VERBOSE).SILENT:
# A target that is always out of date.
cmake_force:
.PHONY : cmake_force
#=============================================================================
# Set environment variables for the build.
# The shell in which to execute make rules.
SHELL = /bin/sh
# The CMake executable.
CMAKE_COMMAND = /nix/store/wn9nlnmyfd1x6ps3zmy04yxjyw3iji86-cmake-3.27.8/bin/cmake
# The command to remove a file.
RM = /nix/store/wn9nlnmyfd1x6ps3zmy04yxjyw3iji86-cmake-3.27.8/bin/cmake -E rm -f
# Escaping for special characters.
EQUALS = =
# The top-level source directory on which CMake was run.
CMAKE_SOURCE_DIR = /home/sam/Git/College/Spring-2023/CS-2124/Assignment-1
# The top-level build directory on which CMake was run.
CMAKE_BINARY_DIR = /home/sam/Git/College/Spring-2023/CS-2124/Assignment-1
#=============================================================================
# Targets provided globally by CMake.
# Special rule for the target edit_cache
edit_cache:
@$(CMAKE_COMMAND) -E cmake_echo_color "--switch=$(COLOR)" --cyan "No interactive CMake dialog available..."
/nix/store/wn9nlnmyfd1x6ps3zmy04yxjyw3iji86-cmake-3.27.8/bin/cmake -E echo No\ interactive\ CMake\ dialog\ available.
.PHONY : edit_cache
# Special rule for the target edit_cache
edit_cache/fast: edit_cache
.PHONY : edit_cache/fast
# Special rule for the target rebuild_cache
rebuild_cache:
@$(CMAKE_COMMAND) -E cmake_echo_color "--switch=$(COLOR)" --cyan "Running CMake to regenerate build system..."
/nix/store/wn9nlnmyfd1x6ps3zmy04yxjyw3iji86-cmake-3.27.8/bin/cmake --regenerate-during-build -S$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR)
.PHONY : rebuild_cache
# Special rule for the target rebuild_cache
rebuild_cache/fast: rebuild_cache
.PHONY : rebuild_cache/fast
# The main all target
all: cmake_check_build_system
$(CMAKE_COMMAND) -E cmake_progress_start /home/sam/Git/College/Spring-2023/CS-2124/Assignment-1/CMakeFiles /home/sam/Git/College/Spring-2023/CS-2124/Assignment-1//CMakeFiles/progress.marks
$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 all
$(CMAKE_COMMAND) -E cmake_progress_start /home/sam/Git/College/Spring-2023/CS-2124/Assignment-1/CMakeFiles 0
.PHONY : all
# The main clean target
clean:
$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 clean
.PHONY : clean
# The main clean target
clean/fast: clean
.PHONY : clean/fast
# Prepare targets for installation.
preinstall: all
$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 preinstall
.PHONY : preinstall
# Prepare targets for installation.
preinstall/fast:
$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 preinstall
.PHONY : preinstall/fast
# clear depends
depend:
$(CMAKE_COMMAND) -P /home/sam/Git/College/Spring-2023/CS-2124/Assignment-1/CMakeFiles/VerifyGlobs.cmake
$(CMAKE_COMMAND) -S$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1
.PHONY : depend
#=============================================================================
# Target rules for targets named dynamic-memory
# Build rule for target.
dynamic-memory: cmake_check_build_system
$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 dynamic-memory
.PHONY : dynamic-memory
# fast build rule for target.
dynamic-memory/fast:
$(MAKE) $(MAKESILENT) -f CMakeFiles/dynamic-memory.dir/build.make CMakeFiles/dynamic-memory.dir/build
.PHONY : dynamic-memory/fast
#=============================================================================
# Target rules for targets named file-operations
# Build rule for target.
file-operations: cmake_check_build_system
$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 file-operations
.PHONY : file-operations
# fast build rule for target.
file-operations/fast:
$(MAKE) $(MAKESILENT) -f CMakeFiles/file-operations.dir/build.make CMakeFiles/file-operations.dir/build
.PHONY : file-operations/fast
#=============================================================================
# Target rules for targets named string-manipulation
# Build rule for target.
string-manipulation: cmake_check_build_system
$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 string-manipulation
.PHONY : string-manipulation
# fast build rule for target.
string-manipulation/fast:
$(MAKE) $(MAKESILENT) -f CMakeFiles/string-manipulation.dir/build.make CMakeFiles/string-manipulation.dir/build
.PHONY : string-manipulation/fast
progs/dynamic-memory/src/main.o: progs/dynamic-memory/src/main.c.o
.PHONY : progs/dynamic-memory/src/main.o
# target to build an object file
progs/dynamic-memory/src/main.c.o:
$(MAKE) $(MAKESILENT) -f CMakeFiles/dynamic-memory.dir/build.make CMakeFiles/dynamic-memory.dir/progs/dynamic-memory/src/main.c.o
.PHONY : progs/dynamic-memory/src/main.c.o
progs/dynamic-memory/src/main.i: progs/dynamic-memory/src/main.c.i
.PHONY : progs/dynamic-memory/src/main.i
# target to preprocess a source file
progs/dynamic-memory/src/main.c.i:
$(MAKE) $(MAKESILENT) -f CMakeFiles/dynamic-memory.dir/build.make CMakeFiles/dynamic-memory.dir/progs/dynamic-memory/src/main.c.i
.PHONY : progs/dynamic-memory/src/main.c.i
progs/dynamic-memory/src/main.s: progs/dynamic-memory/src/main.c.s
.PHONY : progs/dynamic-memory/src/main.s
# target to generate assembly for a file
progs/dynamic-memory/src/main.c.s:
$(MAKE) $(MAKESILENT) -f CMakeFiles/dynamic-memory.dir/build.make CMakeFiles/dynamic-memory.dir/progs/dynamic-memory/src/main.c.s
.PHONY : progs/dynamic-memory/src/main.c.s
progs/file-operations/src/main.o: progs/file-operations/src/main.c.o
.PHONY : progs/file-operations/src/main.o
# target to build an object file
progs/file-operations/src/main.c.o:
$(MAKE) $(MAKESILENT) -f CMakeFiles/file-operations.dir/build.make CMakeFiles/file-operations.dir/progs/file-operations/src/main.c.o
.PHONY : progs/file-operations/src/main.c.o
progs/file-operations/src/main.i: progs/file-operations/src/main.c.i
.PHONY : progs/file-operations/src/main.i
# target to preprocess a source file
progs/file-operations/src/main.c.i:
$(MAKE) $(MAKESILENT) -f CMakeFiles/file-operations.dir/build.make CMakeFiles/file-operations.dir/progs/file-operations/src/main.c.i
.PHONY : progs/file-operations/src/main.c.i
progs/file-operations/src/main.s: progs/file-operations/src/main.c.s
.PHONY : progs/file-operations/src/main.s
# target to generate assembly for a file
progs/file-operations/src/main.c.s:
$(MAKE) $(MAKESILENT) -f CMakeFiles/file-operations.dir/build.make CMakeFiles/file-operations.dir/progs/file-operations/src/main.c.s
.PHONY : progs/file-operations/src/main.c.s
progs/string-manipulation/src/main.o: progs/string-manipulation/src/main.c.o
.PHONY : progs/string-manipulation/src/main.o
# target to build an object file
progs/string-manipulation/src/main.c.o:
$(MAKE) $(MAKESILENT) -f CMakeFiles/string-manipulation.dir/build.make CMakeFiles/string-manipulation.dir/progs/string-manipulation/src/main.c.o
.PHONY : progs/string-manipulation/src/main.c.o
progs/string-manipulation/src/main.i: progs/string-manipulation/src/main.c.i
.PHONY : progs/string-manipulation/src/main.i
# target to preprocess a source file
progs/string-manipulation/src/main.c.i:
$(MAKE) $(MAKESILENT) -f CMakeFiles/string-manipulation.dir/build.make CMakeFiles/string-manipulation.dir/progs/string-manipulation/src/main.c.i
.PHONY : progs/string-manipulation/src/main.c.i
progs/string-manipulation/src/main.s: progs/string-manipulation/src/main.c.s
.PHONY : progs/string-manipulation/src/main.s
# target to generate assembly for a file
progs/string-manipulation/src/main.c.s:
$(MAKE) $(MAKESILENT) -f CMakeFiles/string-manipulation.dir/build.make CMakeFiles/string-manipulation.dir/progs/string-manipulation/src/main.c.s
.PHONY : progs/string-manipulation/src/main.c.s
# Help Target
help:
@echo "The following are some of the valid targets for this Makefile:"
@echo "... all (the default if no target is provided)"
@echo "... clean"
@echo "... depend"
@echo "... edit_cache"
@echo "... rebuild_cache"
@echo "... dynamic-memory"
@echo "... file-operations"
@echo "... string-manipulation"
@echo "... progs/dynamic-memory/src/main.o"
@echo "... progs/dynamic-memory/src/main.i"
@echo "... progs/dynamic-memory/src/main.s"
@echo "... progs/file-operations/src/main.o"
@echo "... progs/file-operations/src/main.i"
@echo "... progs/file-operations/src/main.s"
@echo "... progs/string-manipulation/src/main.o"
@echo "... progs/string-manipulation/src/main.i"
@echo "... progs/string-manipulation/src/main.s"
.PHONY : help
#=============================================================================
# Special targets to cleanup operation of make.
# Special rule to run CMake to check the build system integrity.
# No rule that depends on this can have commands that come from listfiles
# because they might be regenerated.
cmake_check_build_system:
$(CMAKE_COMMAND) -P /home/sam/Git/College/Spring-2023/CS-2124/Assignment-1/CMakeFiles/VerifyGlobs.cmake
$(CMAKE_COMMAND) -S$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0
.PHONY : cmake_check_build_system

View File

@ -0,0 +1,4 @@
#include <stdio.h>
int main(void)
{
}

View File

@ -0,0 +1,25 @@
#include <stdio.h>
#include <stdlib.h>
int main(void)
{
printf("1.) Creation of a new file:\n");
char fname[] = "price.txt";
FILE *f = fopen(fname, "w+");
printf("Filename: %s\nLocation: %s\n", fname, realpath(fname, NULL));
fclose(f);
return 0;
}
/**
* @brief Create a new file and output the file name and location of the file to STDOUT
*/
void create_new_file(char* filename) {
}
/**
* @brief Open and close an existing file and output the location of the file to STDOUT
*/
void open_and_close_existing_file() {}
/**
* @brief Open and write
*/
void write_to_a_file() {}

View File

@ -0,0 +1,4 @@
int main(void)
{
return 0;
}

View File

@ -1,5 +1,6 @@
* Lecture 1 [2024-01-16 Tue]
* Lecture 1
[2024-01-16 Tue]
** Recommended Books
@ -87,9 +88,8 @@
task.
- All Algorithms must satisfy the following criteria:
1. Input
2. Output
3. Definiteness
4. Finiteness
5. Effectiveness
*** How Programs Solve Problems
@ -102,3 +102,337 @@
- Intrusion Detection Systems can rely on heuristics to identify attacks
- Heuristics are basically identified patterns or elements to assist in creating a solution to
some problem
* Lecture 2
[2024-01-23 Tue]
- Time and Space Complexity
- Intro to Asymptotic Notations
- Big O Notation
- Searching
- Binary Search
- Linear Search
** Time and Space Complexity
*** Time Complexity
- Time taken by an algorithm for execution
- Process of determining how processing time increases as the /size of the problem/ (input size)
increases
- Generally, time complexity is expressed by keeping only the /terms which affect runtime most/.
- *For example*, if the time complexity of a program works out to $n^4 + n^3 + n^2 + n$, it is
expressed as $O(n^4)$, since the lower-order terms are small and have a lesser impact on overall
computation time when compared with $n^4$
*** Space Complexity
- Memory required by an algorithm to execute a program
- Space complexity is the total amount of /memory space used/ by an algorithm/program, including
the space of input values during execution
*** Time and Space Complexity Importance
- Intrusion Detection Systems must process gigabits of data or more with minimal latency, time
complexity is important for this
- Handling potentially petabytes of data, memory complexity is important here
*** Data Structures
- Data Structures are necessary for designing efficient algorithms
- Provides reusability and abstraction
- Appropriate data structures help programmers save time and space
- Assists in optimizing data manipulation (i.e. add, remove, edit large amounts of data)
- For example: A tree based data structure is ideal for college course info storage in a program
- Determining what courses need to be taken, courses have "dependencies" on other courses that
could be more than a single course dependency
*** Intro to Asymptotic Notations
- Tells us how good an algorithm is when compared with another algorithm
- Parameters can play a part
- i.e. hardware used for implementation, Operating System, CPU model, processor generation,
etc.
- Therefore, we use Asymptotic analysis to compare space and time complexity
- Analyzes two algorithms based on changes in their performance concerning the increment or
decrement in the input size
- *Big O* ($O()$) describes the upper bound of complexity
- Worst case scenario
- Runtime usually depends on the size of the input:
- $T(n)$: /the time taken on an input of size $n$/
- Asymptotic analysis considers the growth of $T(n)$
*** Big O Notation
- Worst case scenario (analyzes algorithm's upper bound)
- Best-case scenario is not considered for use in a comparative analysis
- That's why we employ worst-case scenarios to get meaningful input
- Algorithm in data structure while programming code is critical
- Big O makes it easier to compare algorithms
- *Big O* notation, $O(g(n))$, is a collection of functions
- A function $f(n)$ is a member of that collection only if it fits the following criterion:
- Constants $c$ and $n_0$ exist such that $f(n) \le c \cdot g(n)$ for all $n \ge n_0$, where
'$c$' represents a constant value
- For example, $f(n) = 3n^2 + 2n$ is in $O(n^2)$ because $3n^2 + 2n \le 4n^2$ for all
$n \ge 2$ (take $c = 4$, $n_0 = 2$)
- $O(f(n))$ describes the upper bound of $f(n)$, the /worst-case scenario/
- $\Omega(f(n))$ describes the lower bound of $f(n)$, the /best-case scenario/
**** Why Do We Need Big O?
- The world we live in today consists of complicated apps & software, each running on various
devices, and each device has different capabilities
- Some devices like desktops can run heavy machine learning software, but others like phones can
only run apps
- When you create an application, you'll need to optimize your code so that it runs smoothly
across devices to give you an edge over your competitors
**** Computing Big O Notation
- *Big-O* asymptotic notation gives us an upper bound:
1. Determine what the input is and what '$n$' represents (i.e. $f(n)=O(g(n))$)
2. Identify maximum number of operations the algorithm performs in terms of '$n$'. (i.e.
addition of two numbers is just 1 operation)
3. Eliminate all but the highest-order term. (i.e. if you have $n^4$ and $n^3$, consider
only $n^4$)
4. Remove all constant factors. Constants will remain /constant/ regardless of user input
- Basically *Big-O* is used to measure and compare worst-case scenarios of algorithms
**** Example Big O Notations
| Big O Notation           | Example                               |
|--------------------------|---------------------------------------|
| Constant: $O(c)$         | $O(1)$                                |
| Logarithmic: $O(log(n))$ | $n=20$ means $log_2(20) \approx 4.32$ |
| Linear: $O(n)$           | $n=20$ means $20$                     |
| Quadratic: $O(n^2)$      | $n=20$ means $20^2 = 400$             |
| Exponential: $O(2^n)$    | $n=20$ means $2^{20} = 1048576$       |
| Factorial: $O(n!)$       | $n=20$ means $20!$                    |
**** Example Program
#+begin_src c
#include <stdio.h>
int main(void) {
    int n;
    printf("N = ");
    scanf("%d", &n);
    printf("Got: %d\n", n);
    int a[n]; /* variable-length array of n ints */
    for (int i = 0; i < n; i++)
        printf("a[%d] = %d \n", i, a[i] = i - 1); /* one assignment + print per element: n steps */
    return 0;
}
#+end_src
- Big O of this is $O(n)$
*** Searching
- Searching in data structures refers to the process of finding the location of an element in a
list
- One of the most important parts of many data structure algorithms, as an operation can be
performed on an element only if we first find it
- We do not want searching to take '$n$' steps for searching an array of '$n$' number of
elements
- In some cases we are bound to take '$n$' steps
- Different algorithms try to minimize the number of steps to search an element
**** Binary Search
- Divide and conquer approach
- Requires the data to be sorted
- In sequential search, when we compare against the first item, there are at most $n - 1$ more
items to look through if the first item is not what we are looking for
- Instead of searching the list in sequence, a /binary search/ will start by examining the
middle term
- If that term is the one we are searching for, we are done
- If it is not the correct term, we can use the ordered nature of the list to eliminate half
of the remaining items
- If the term we are searching for is greater than the middle item, we know that the entire
lower half of the list as well as the middle item can be eliminated from further
consideration
- The term, if it is in the list, must be in the upper half
***** Algorithm Steps
#+begin_src python
def binary_search(arr: list[int], term: int, low: int, high: int) -> int:
    """Search the sorted list arr[low..high] (inclusive) for term and return its index."""
    while low <= high:
        mid = (low + high) // 2      # examine the middle term
        if arr[mid] < term:
            low = mid + 1            # term can only be in the upper half
        elif arr[mid] == term:
            return mid               # found it
        else:
            high = mid - 1           # term can only be in the lower half
    raise ValueError("Unable to find the search term!")

print(binary_search([0, 1, 2, 3, 4], 0, 0, 4))  # Outputs: 0
#+end_src
**** Linear Search
- The Linear Search (sequential search) algorithm starts at one end of a list and goes through
each element of a list until the desired element is found, otherwise the search continues till the
end of the data set
- Does not require data to be sorted
- Poor *Big-O* complexity: $O(n)$
#+begin_src python
def linear_search(arr: list[int], term: int) -> int:
    """Return the index of term in arr, scanning from one end to the other."""
    for i in range(len(arr)):
        if arr[i] == term:
            return i
    raise ValueError(f"Unable to find {term} in array!")
#+end_src
* Lecture 2
** Big O Notation
*** $O(log(n))$
- Divide and conquer
- If the base is not specified in CS, assume a base of $2$: $O(log_2(n))$
- Binary search is an algorithm that is of $O(log(n))$ complexity
** Sorting Algorithms
- Sorting refers to arranging data in a particular format
- Many search algorithms depend on sorted data, hence /sorting/
- In general there are *2 approaches* to sort an array of elements:
1. Some algorithms work by moving elements to their final position, one at a time. You sort an
array of size N, put 1 item in place, and continue sorting an array of size N - 1.
- Memory efficient
- Performance inefficient
2. Some algorithms put items into a temporary position, close(r) to their final position. You
rescan, moving items closer to the final position with each iteration.
- Memory inefficient
- Performance efficient
*** Complexity and Running Time
- Factors:
1. Algorithmic complexity
2. Additional space requirements
3. Use of recursion
- Have to be careful with recursion; it can easily spiral out into $O(b^n)$ complexity
4. Worst-case behavior
- Worst-case behavior is important for real-time systems that need guaranteed performance
5. Behavior on already-sorted or nearly-sorted data
*** Stable vs Unstable Sorting
- A stable sort is one which preserves the original order of the input set
- Elements of same value will be in order
- An unstable sort does not preserve the original order of elements
- Ordering depends only on the sorted value; the original relative order of equal elements is not
respected (see the short example below)
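- A minimal illustration (my own, not from the slides) using Python's built-in ~sorted~, which is a
stable sort: elements with equal keys keep their original relative order
#+begin_src python
# Hypothetical example: sort (name, grade) pairs by grade alone.
# Python's built-in sorted() is a stable sort, so students with the
# same grade keep their original relative order.
students = [("Ana", 90), ("Bo", 85), ("Cy", 90), ("Di", 85)]

by_grade = sorted(students, key=lambda s: s[1])
print(by_grade)
# [('Bo', 85), ('Di', 85), ('Ana', 90), ('Cy', 90)]
# Bo still comes before Di, and Ana before Cy: equal keys kept their
# original order, which is what makes the sort stable.
#+end_src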
*** In-place and Out-of-place Sorting
- An *in-place algorithm* modifies the input, which can be a list or an array, without using
any additional memory. As the algorithm runs, the input is usually overwritten by the output, so no
additional space is required.
- In-place algorithms may take some memory, like using some variables for its operation
- Overall, it takes constant memory. Space complexity of $O(1)$
- An algorithm that is not in place is called a *not-in-place* or *out-of-place* algorithm. These
sorting algorithms use =extra space= for sorting, which depends upon the size of the input (see the
short illustration below)
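- As a small illustration (my own example): Python's ~list.sort()~ sorts in place and returns
~None~, while ~sorted()~ builds and returns a brand-new list, leaving the original untouched
#+begin_src python
nums = [3, 1, 2]

new_list = sorted(nums)   # out-of-place: allocates extra space for a new list
print(nums)               # [3, 1, 2] -- the original list is unchanged
print(new_list)           # [1, 2, 3]

nums.sort()               # in-place: reorders the existing list and returns None
print(nums)               # [1, 2, 3] -- the original list itself was modified
#+end_src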
*** Bubble Sort
- *Bubble Sort* works by repeatedly swapping adjacent elements if they are in the wrong order
- Not suitable for large data sets as its average and worst-case complexity is quite high
- Bubble sort is an In-place and Stable sorting algorithm
- Big O's:
- Time Complexity: $O(n^2)$
- Space Complexity: $O(1)$ (bubble sort is in-place, using only constant extra memory)
- Steps:
1. Walk through the array n-times
2. As you walk through the array, check if the current element and its next neighbor are out
of order
3. If they are out of order, swap them (see the sketch below)
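- A minimal sketch of the steps above (my own illustration; the function name ~bubble_sort~ is not
from the slides):
#+begin_src python
def bubble_sort(arr: list[int]) -> None:
    """Sort arr in place by repeatedly swapping out-of-order neighbors."""
    n = len(arr)
    for _ in range(n):                   # walk through the array n times
        for i in range(n - 1):           # compare each element with its next neighbor
            if arr[i] > arr[i + 1]:      # out of order?
                arr[i], arr[i + 1] = arr[i + 1], arr[i]  # swap them

data = [5, 1, 4, 2, 8]
bubble_sort(data)
print(data)  # [1, 2, 4, 5, 8]
#+end_src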
*** Selection Sort
- *Selection Sort* is an *in-place* algorithm in which the list is divided into two parts
1. The sorted part at the left end
2. The unsorted part at the right end
- The smallest element is selected from the unsorted array and swapped with the leftmost
element, and that element becomes a part of the sorted array. This process continues, moving the
unsorted array boundary one element to the right.
- Selection sort is generally preferred over Bubble Sort
- Big O's:
- Time Complexity: $O(n^2)$
- Space Complexity: $O(1)$
- Steps:
1. Set ~MIN~ to location $0$
2. Search the minimum element in the list
3. Swap with value at location ~MIN~
4. Increment ~MIN~ to point to next element
5. Repeat until the list is sorted (see the sketch below)
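- A minimal sketch of the steps above (my own illustration; names such as ~selection_sort~ and
~min_idx~ are not from the slides):
#+begin_src python
def selection_sort(arr: list[int]) -> None:
    """Sort arr in place: repeatedly select the minimum of the unsorted part."""
    n = len(arr)
    for boundary in range(n):                # boundary between sorted (left) and unsorted (right)
        min_idx = boundary                   # step 1: set MIN to the first unsorted location
        for i in range(boundary + 1, n):     # step 2: search for the minimum element
            if arr[i] < arr[min_idx]:
                min_idx = i
        arr[boundary], arr[min_idx] = arr[min_idx], arr[boundary]  # step 3: swap into place

data = [64, 25, 12, 22, 11]
selection_sort(data)
print(data)  # [11, 12, 22, 25, 64]
#+end_src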
*** Insertion Sort
# TODO: Finish this section out
- Insertion sort is a simple sorting algorithm that works similar to the way you sort playing
cards in your hands
- The array is virtually split into a sorted and an unsorted part
- Values from the unsorted part are picked and placed at the correct position in the sorted part
- Steps:
1. If it is the first element, it is already sorted
2. Pick the next element
3. Compare it with all elements in the sorted sub-list
4. Shift all elements in the sorted sub-list that are greater than the value to be sorted
5. Insert the value into its correct position (see the sketch below)
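- A minimal sketch of the steps above (my own illustration; the function name ~insertion_sort~ is
not from the slides):
#+begin_src python
def insertion_sort(arr: list[int]) -> None:
    """Sort arr in place by inserting each element into the sorted prefix."""
    for j in range(1, len(arr)):          # arr[0] by itself is already sorted
        value = arr[j]                    # pick the next element
        i = j - 1
        while i >= 0 and arr[i] > value:  # shift larger sorted elements to the right
            arr[i + 1] = arr[i]
            i -= 1
        arr[i + 1] = value                # insert the value into its correct position

data = [12, 11, 13, 5, 6]
insertion_sort(data)
print(data)  # [5, 6, 11, 12, 13]
#+end_src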
*** Algorithm Comparison
| Bubble Sort                   | Selection Sort                                          | Insertion Sort                                           |
|-------------------------------|---------------------------------------------------------|----------------------------------------------------------|
| Simple Sorting Algorithm      | Simple Sorting Algorithm                                | Simple Sorting Algorithm                                 |
| Compares Neighboring Elements | Takes the smallest element and moves it into its place | Transfers one element at a time to its correct position |
*** Merge Sort
- Divide and Conquer Algorithm
- Works by dividing an array into smaller subarrays, sorting each subarray, and then merging the
sorted subarrays back together
- Does not check if the data is already sorted.
- Steps:
1. Find the middle index of the array: ~middle = first + (last - first)/2~
2. Divide the array from the middle
3. Call merge sort for the first half of the array: ~MergeSort(array, first, middle)~
4. Call merge sort for the second half of the array: ~MergeSort(array, middle + 1, last)~
5. Merge the two sorted halves into a single sorted array (see the sketch below)
- Big O's:
- Time Complexity: $O(n log(n))$
- Space Complexity: $O(n)$
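- A minimal sketch of the steps above (my own illustration; the recursive ~merge_sort~ with
inclusive ~first~/~last~ indices is one of several ways to write it):
#+begin_src python
def merge_sort(arr: list[int], first: int, last: int) -> None:
    """Recursively sort arr[first..last] (inclusive indices) in place."""
    if first >= last:
        return
    middle = first + (last - first) // 2
    merge_sort(arr, first, middle)          # sort the first half
    merge_sort(arr, middle + 1, last)       # sort the second half
    merged, i, j = [], first, middle + 1    # merge the two sorted halves
    while i <= middle and j <= last:
        if arr[i] <= arr[j]:
            merged.append(arr[i])
            i += 1
        else:
            merged.append(arr[j])
            j += 1
    merged.extend(arr[i:middle + 1])        # leftover elements from either half
    merged.extend(arr[j:last + 1])
    arr[first:last + 1] = merged            # copy back: the O(n) extra space

data = [38, 27, 43, 3, 9, 82, 10]
merge_sort(data, 0, len(data) - 1)
print(data)  # [3, 9, 10, 27, 38, 43, 82]
#+end_src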
*** Quick Sort
- Divide and Conquer Algorithm
- Picks an element as a pivot and partitions the given array around the picked pivot
- There are many different versions of Quick Sort that pick pivot in different ways:
1. Always pick the first element as a pivot
2. Always pick the last element as a pivot
3. Pick a random element as a pivot
4. Pick median as the pivot
- Big O's:
- Time Complexity: $O(n^2)$ worst case; the average case is $O(n log(n))$
- Steps:
1. Pick an element from the array as the pivot
2. Divide the unsorted array of elements into two sub-arrays
a) Values less than the pivot go in the first sub-array
b) Values greater than the pivot go in the second sub-array
3. Recursively repeat step ~2~ until the sub-arrays are sorted (see the sketch below)
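- A minimal out-of-place sketch (my own illustration) using the last element as the pivot; the
in-place version from class would instead partition by swapping elements around the pivot:
#+begin_src python
def quick_sort(arr: list[int]) -> list[int]:
    """Return a sorted copy: pick a pivot, partition, then recurse on both sides."""
    if len(arr) <= 1:
        return arr
    pivot = arr[-1]                               # strategy 2: last element as the pivot
    less = [x for x in arr[:-1] if x <= pivot]    # first sub-array: values <= pivot
    greater = [x for x in arr[:-1] if x > pivot]  # second sub-array: values > pivot
    return quick_sort(less) + [pivot] + quick_sort(greater)

print(quick_sort([10, 80, 30, 90, 40, 50, 70]))  # [10, 30, 40, 50, 70, 80, 90]
#+end_src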

View File

@ -0,0 +1,22 @@
* TODO Assignment 1 CS2124 :college:cs2124:
DEADLINE: <2024-01-29 Mon>
* TODO Assignment 2 CS2124 :college:cs2124:
DEADLINE: <2024-02-12 Mon>
* TODO Quiz 1 CS2124 :college:cs2124:
DEADLINE: <2024-02-22 Thu>
* TODO Midterm Exam CS2124 :college:cs2124:
DEADLINE: <2024-02-29 Thu>
* TODO Assignment 3 :college:cs2124:
DEADLINE: <2024-03-18 Mon>
* TODO Assignment 4 :college:cs2124:
DEADLINE: <2024-04-01 Mon>
* TODO Quiz 2 :college:cs2124:
DEADLINE: <2024-04-30 Tue>
* TODO Final Exam :college:cs2124:
DEADLINE: <2024-05-07 Tue>

View File

@ -0,0 +1,4 @@
* TODO Assignment 1
DEADLINE: <2024-01-26 Fri> SCHEDULED: <2024-01-25 Thu>
Complete Zybooks section ~1~ and the first homework assignment

View File

@ -1,49 +1,50 @@
| Time | Monday | Tuesday | Wednesday | Thursday | Friday |
|------|-----------------------------|---------------------------------------|-------------|---------------------------------------|--------|
| 7am | | | | | |
| 8am | | (8:30am) Data Structures & Algorithms | | (8:30am) Data Structures & Algorithms | |
| 9am | | | | | |
| 10am |                             |                                       |             | Data Structures & Algorithms Lab      |        |
| 11am | | | | | |
| 12pm | | | | | |
| 1pm | | | | | |
| 2pm | Calculus II | Calculus II | Calculus II | Calculus II | |
| 3pm | Essence of Computer Science | | | | |
| 4pm | | | | | |
* Course Schedule
| Time | Monday | Tuesday | Wednesday | Thursday | Friday |
|------|-----------------------------|---------------------------------------|----------------------|---------------------------------------|----------------------|
| 7am | | | | | |
| 8am | | (8:30am) Data Structures & Algorithms | | (8:30am) Data Structures & Algorithms | |
| 9am | | | | | |
| 10am |                             |                                       |                      | Data Structures & Algorithms Lab      |                      |
| 11am | | | | | |
| 12pm | | | | | |
| 1pm | Discrete Mathematics | | Discrete Mathematics | | Discrete Mathematics |
| 2pm | | | | | |
| 3pm | Essence of Computer Science | | | | |
| 4pm | | | | | |
* Course Locations
* Calculus II :college:mat1224:
SCHEDULED: <2024-01-15 Mon 14:00-14:50 +1w><2024-01-16 Tue 14:00-14:50 +1w><2024-01-17 Wed 14:00-14:50 +1w><2024-01-18 Thu 14:00-14:50 +1w>
| Course | Location |
|------------------|--------------|
| =CS233= | =BB 3.02.12= |
| =CS2124 Lecture= | =BB 3.03.24= |
| =CS2124 Lab= | =NPB 1.226= |
| =CS1011= | =NPB 1.226= |
:PROPERTIES:
:COURSE: =MAT1224=
:LOCATION: =MH 3.02.26=
:END:
* Courses
* Essence of Computer Science :college:cs1011:
SCHEDULED: <2024-01-15 Mon 14:00 +1w>
** Discrete Mathematics :college:cs2233:
SCHEDULED: <2024-01-22 Mon 13:00-13:50><2024-01-24 Wed 13:00-13:50><2024-01-26 Fri 13:00-13:50>
:PROPERTIES:
:COURSE: =CS1011=
:LOCATION: =NPB 1.226=
:END:
- COURSE: =CS2233=
- LOCATION: =BB 3.
** Essence of Computer Science :college:cs1011:
SCHEDULED: <2024-01-15 Mon 15:00-15:50 +1w>
* Data Structures & Algorithms :college:cs2124:
- COURSE: =CS1011=
- LOCATION: =NPB 1.226=
:PROPERTIES:
:COURSE: =CS2124=
:END:
** Lecture :college:cs2124:
** Data Structures & Algorithms :college:cs2124:
- COURSE: =CS2124=
*** Lecture :college:cs2124:
SCHEDULED: <2024-01-16 Tue 08:30-09:45 +1w><2024-01-18 Thu 08:30-09:45 +1w>
:PROPERTIES:
:LOCATION: =BB 3.03.24=
:END:
- LOCATION =BB 3.03.24=
** Lab :college:cs2124:
*** Lab :college:cs2124:
SCHEDULED: <2024-01-18 Thu 10:00-10:50 +1w>
:PROPERTIES:
:LOCATION: =NPB 1.226=
:END:
- LOCATION =NPB 1.226=