Making a take function myself - clojure

I'm trying to write my own take function, but it appears to be giving a stack overflow. Any idea what may be causing it?
(defn my-take-plus [n Lst LstAcc count]
  (let [LstVec (into [] Lst)]
    (cond (= count n) LstAcc
          :else
          (do
            (conj LstAcc (first LstVec))
            (inc count)
            (my-take-plus n (apply list (rest LstVec)) LstAcc count)))))

(defn my-take [n Lst]
  (my-take-plus n Lst [] 0))

The immediate problem in your code is that the results of (conj LstAcc (first LstVec)) and (inc count) inside the do block are discarded: conj and inc return new values rather than modifying LstAcc or count in place, and the recursive call is made with the same, unchanged count. The (= count n) base case is therefore never reached, and since the call is a plain recursive call rather than recur, each step adds a stack frame until the stack overflows.

Also, there is one more 'clojurish' way to do this:
(defn my-take [n data]
  (when (and (pos? n) (seq data))
    (lazy-seq
      (cons (first data)
            (my-take (dec n) (rest data))))))
This one is lazy and also prevents the stack overflow. Moreover, as far as I remember, clojure.core/take is implemented in a similar way.
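For example (assuming the lazy definition above), nothing is realized until it is consumed, so it even works on an infinite sequence:

(my-take 3 (range))
;=> (0 1 2)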

I would consider using a loop/recur strategy. Clojure does not perform automatic tail-call optimization, but recur rebinds the loop locals without growing the stack, which prevents the StackOverflowError.
(defn take' [n coll]
  (loop [n n
         acc []
         coll coll]
    (cond
      (empty? coll) acc
      ((comp not pos?) n) acc
      :else (recur (dec n) (conj acc (first coll)) (rest coll)))))
In your example, I would've considered using an if, since you only have two conditional branches; cond is generally used more like a case statement.
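For illustration, a minimal sketch of the same loop written with if instead of cond (a hypothetical rearrangement of take' above, not the original poster's code):

(defn take'' [n coll]
  (loop [n n, acc [], coll coll]
    (if (or (empty? coll) (not (pos? n)))
      acc
      (recur (dec n) (conj acc (first coll)) (rest coll)))))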

Related

Clojure - Using recursion to find the number of elements in a list

I have written a function that uses recursion to find the number of elements in a list, and it works successfully; however, I don't particularly like the way I've written it. Now that I've written it one way, I can't seem to think of a different way of doing it.
My code is below:
(def length
  (fn [n]
    (loop [i n
           total 0]
      (cond (empty? i) total
            :t (recur (rest i) (inc total))))))
To me it seems overcomplicated; can anyone think of another way this could be written, for comparison?
Any help greatly appreciated.
Here is a naive recursive version:
(defn my-count [coll]
  (if (empty? coll)
    0
    (inc (my-count (rest coll)))))
Bear in mind there's not going to be any tail call optimization going on here so for long lists the stack will overflow.
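To see the difference in practice, here is a rough check (the exact length needed to blow the default JVM stack will vary with your settings):

(my-count (range 100))     ;=> 100
(my-count (range 1000000)) ; likely throws StackOverflowError
(count (range 1000000))    ;=> 1000000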
Here is a version using reduce:
(defn my-count [coll]
  (reduce (fn [acc x] (inc acc)) 0 coll))
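Because reduce consumes the collection iteratively, this version doesn't grow the stack; for example:

(my-count [:one :two :three]) ;=> 3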
Here is code showing some different solutions. Normally, you should use the built-in function count.
(def data [:one :two :three])

(defn count-loop [data]
  (loop [cnt 0
         remaining data]
    (if (empty? remaining)
      cnt
      (recur (inc cnt) (rest remaining)))))

(defn count-recursive [remaining]
  (if (empty? remaining)
    0
    (inc (count-recursive (rest remaining)))))

(defn count-imperative [data]
  (let [cnt (atom 0)]
    (doseq [elem data]
      (swap! cnt inc))
    @cnt))

;; requires clojure.test, e.g. (require '[clojure.test :refer [deftest is]])
(deftest t-count
  (is (= 3 (count data)))
  (is (= 3 (count-loop data)))
  (is (= 3 (count-recursive data)))
  (is (= 3 (count-imperative data))))
Here's one that runs in constant stack thanks to recur and doesn't rely on loop. Basically the same as Alan Thompson's first one, but inner functions are the best things. (And they feel more idiomatic to me.) :-)
(defn my-count [sq]
  (letfn [(inner-count [c s]
            (if (empty? s)
              c
              (recur (inc c) (rest s))))]
    (inner-count 0 sq)))
Just for completeness, here is another twist
(defn my-count
  ([data]
   (my-count data 0))
  ([data counter]
   (if (empty? data)
     counter
     (recur (rest data) (inc counter)))))

Checking odd parity in clojure

I have the following functions that check for odd parity in a sequence:
(defn countOf [a-seq elem]
  (loop [number 0 currentSeq a-seq]
    (cond (empty? currentSeq) number
          (= (first currentSeq) elem) (recur (inc number) (rest currentSeq))
          :else (recur number (rest currentSeq)))))
(defn filteredSeq [a-seq elemToRemove]
  (remove (set (vector (first a-seq))) a-seq))
(defn parity [a-seq]
  (loop [resultset [] currentSeq a-seq]
    (cond (empty? currentSeq) (set resultset)
          (odd? (countOf currentSeq (first currentSeq)))
          (recur (concat resultset (vector (first currentSeq)))
                 (filteredSeq currentSeq (first currentSeq)))
          :else (recur resultset (filteredSeq currentSeq (first currentSeq))))))
For example, (parity [1 1 1 2 2 3]) -> #{1 3}; that is, it picks the elements that occur an odd number of times in the sequence.
Is there a better way to achieve this?
How can this be done with Clojure's reduce function?
First, I decided to make more idiomatic versions of your code, so I could really see what it was doing:
;; idiomatic naming
;; no need to rewrite count and filter for this code
;; putting item and collection in idiomatic argument order
(defn count-of [elem a-seq]
  (count (filter #(= elem %) a-seq)))

;; idiomatic naming
;; putting item and collection in idiomatic argument order
;; actually used the elem-to-remove argument
(defn filtered-seq [elem-to-remove a-seq]
  (remove #(= elem-to-remove %) a-seq))

;; idiomatic naming
;; if you want a set, use a set from the beginning
;; destructuring rather than repeated usage of first
;; use rest to recur when the first item is guaranteed to be dropped
(defn idiomatic-parity [a-seq]
  (loop [result-set #{}
         [elem & others :as current-seq] a-seq]
    (cond (empty? current-seq)
          result-set

          (odd? (count-of elem current-seq))
          (recur (conj result-set elem) (filtered-seq elem others))

          :else
          (recur result-set (filtered-seq elem others)))))
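A quick check against the example from the question (assuming the definitions above):

(idiomatic-parity [1 1 1 2 2 3]) ;=> #{1 3}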
Next, as requested, a version that uses reduce to accumulate the result:
;; mapcat allows us to return 0 or more results for each input
(defn reducing-parity [a-seq]
  (set
   (mapcat
    (fn [[k v]]
      (when (odd? v) [k]))
    (reduce (fn [result item]
              (update-in result [item] (fnil inc 0)))
            {}
            a-seq))))
But, reading over this, I notice that the reduce is just frequencies, a built-in Clojure function. And my mapcat was really just a hand-rolled keep, another built-in.
(defn most-idiomatic-parity [a-seq]
  (set
   (keep
    (fn [[k v]]
      (when (odd? v) k))
    (frequencies a-seq))))
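For reference, with the example data from the question (frequencies returns a map of element to count):

(frequencies [1 1 1 2 2 3])           ;=> {1 3, 2 2, 3 1}
(most-idiomatic-parity [1 1 1 2 2 3]) ;=> #{1 3}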
In Clojure we can refine our code, and as we recognize places where our logic replicates built-in functionality, we can simplify the code and make it clearer. Also, there is a good chance the built-in is better optimized than our own work-alikes.
Is there a better way to achieve this?
(defn parity [coll]
  (->> coll
       frequencies
       (filter (fn [[_ v]] (odd? v)))
       (map first)
       set))
For example,
(parity [1 1 1 2 1 2 1 3])
;#{1 3}
How can this be done with reduce function of clojure.
We can use reduce to rewrite frequencies:
(defn frequencies [coll]
  (reduce
   (fn [acc x] (assoc acc x (inc (get acc x 0))))
   {}
   coll))
... and again to implement parity in terms of it:
(defn parity [coll]
  (let [freqs (frequencies coll)]
    (reduce (fn [s [k v]] (if (odd? v) (conj s k) s)) #{} freqs)))

Grouping words and more

I'm working on a project to learn Clojure in practice. I'm doing well, but sometimes I get stuck. This time I need to transform a sequence of the form:
[":keyword0" "word0" "word1" ":keyword1" "word2" "word3"]
into:
[[:keyword0 "word0" "word1"] [:keyword1 "word2" "word3"]]
I've been trying for at least two hours, but I don't know enough Clojure functions yet to compose something useful that solves the problem in a functional manner.
I think this transformation should involve some kind of partitioning; here is my attempt:
(partition-by (fn [x] (.startsWith x ":")) *1)
But the result looks like this:
((":keyword0") ("word1" "word2") (":keyword1") ("word3" "word4"))
Now I should group it again... I doubt that I'm doing the right thing here... Also, I need to convert strings (only those that begin with :) into keywords. I think this combination should work:
(keyword (subs ":keyword0" 1))
How to write a function which performs the transformation in most idiomatic way?
Here is a high performance version, using reduce
(reduce (fn [acc next]
          (if (.startsWith next ":")
            (conj acc [(-> next (subs 1) keyword)])
            (conj (pop acc) (conj (peek acc) next))))
        [] data)
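With the question's input vector bound to data, this evaluates to exactly the vector of vectors the question asks for:

;=> [[:keyword0 "word0" "word1"] [:keyword1 "word2" "word3"]]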
Alternatively, you could extend your code like this
(->> data
     (partition-by #(.startsWith % ":"))
     (partition 2)
     (map (fn [[[kw-str] strs]]
            (cons (-> kw-str
                      (subs 1)
                      keyword)
                  strs))))
What about this:
(defn group-that [arg]
  (if (not-empty arg)
    (loop [list arg, acc [], result []]
      (if (not-empty list)
        (if (.startsWith (first list) ":")
          (if (not-empty acc)
            (recur (rest list) (vector (first list)) (conj result acc))
            (recur (rest list) (vector (first list)) result))
          (recur (rest list) (conj acc (first list)) result))
        (conj result acc)))))
Just one iteration over the seq, and without any need for macros.
Since the question is already here... This is my best effort:
(def data [":keyword0" "word0" "word1" ":keyword1" "word2" "word3"])
(->> data
(partition-by (fn [x] (.startsWith x ":")))
(partition 2)
(map (fn [[[k] w]] (apply conj [(keyword (subs k 1))] w))))
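Evaluating this in a REPL should give a seq of vectors matching the desired groups (wrap it in vec if a vector of vectors is required):

;=> ([:keyword0 "word0" "word1"] [:keyword1 "word2" "word3"])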
I'm still looking for a better solution or criticism of this one.
First, let's construct a function that breaks vector v into sub-vectors, the breaks occurring everywhere property pred holds.
(defn breakv-by [pred v]
  (let [break-points (filter identity (map-indexed (fn [n x] (when (pred x) n)) v))
        starts (cons 0 break-points)
        finishes (concat break-points [(count v)])]
    (mapv (partial subvec v) starts finishes)))
For our case, given
(def data [":keyword0" "word0" "word1" ":keyword1" "word2" "word3"])
then
(breakv-by #(= (first %) \:) data)
produces
[[] [":keyword0" "word0" "word1"] [":keyword1" "word2" "word3"]]
Notice that the initial sub-vector is different:
It has no element for which the predicate holds.
It can be of length zero.
All the others
start with their only element for which the predicate holds and
are at least of length 1.
So breakv-by behaves properly with data that
doesn't start with a breaking element or
has a succession of breaking elements.
For the purposes of the question, we need to muck about with what breakv-by produces somewhat:
(let [pieces (breakv-by #(= (first %) \:) data)]
  (mapv
   #(update-in % [0] (fn [s] (keyword (subs s 1))))
   (rest pieces)))
;[[:keyword0 "word0" "word1"] [:keyword1 "word2" "word3"]]

Clojure Lazy Sequence Issue

I'm working on 4clojure problems and a similar issue keeps coming up. I'll write a solution that works for all but one of the test cases. It's usually the one that is checking for lazy evaluation. The solution below works for all but the last test case. I've tried all kinds of solutions and can't seem to get it to stop evaluating until integer overflow. I read the chapter on lazy sequences in Joy of Clojure, but I'm having a hard time implementing them. Is there a rule of thumb I'm forgetting, like don't use loop or something like that?
; This version is non working at the moment, will try to edit a version that works
(defn i-between [p k coll]
  (loop [v [] coll coll]
    (let [i (first coll) coll (rest coll) n (first coll)]
      (cond (and i n)
            (let [ret (if (p i n) (cons k (cons i v)) (cons i v))]
              (recur ret coll))
            i
            (cons i v)
            :else v))))
Problem 132
Ultimate solution for those curious:
(fn i-between [p k coll]
  (letfn [(looper [coll]
            (if (empty? coll)
              coll
              (let [[h s & xs] coll
                    c (cond (and h s (p h s)) (list h k)
                            (and h s) (list h)
                            :else (list h))]
                (lazy-cat c (looper (rest coll))))))]
    (looper coll)))
When I think about lazy sequences, what usually works is thinking about incremental cons'ing. That is, each recursion step only adds a single element to the list, and of course you never use loop.
So what you have is something like this:
(cons (generate first) (recur rest))
When wrapped in lazy-seq, only the needed elements of the sequence are realized. For instance,
(take 5 (some-lazy-fn))
would only make 5 recursive calls to realize the needed elements.
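As a toy illustration of that pattern (a hypothetical generator, not part of the 4clojure problem itself):

(defn numbers-from [n]
  (lazy-seq (cons n (numbers-from (inc n)))))

(take 5 (numbers-from 0)) ;=> (0 1 2 3 4)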
A tentative, far from perfect solution to the 4clojure problem, that demonstrates the idea:
(fn intercalate
  [pred value col]
  (letfn [(looper [s head]
            (lazy-seq
             (if-let [sec (first s)]
               (if (pred head sec)
                 (cons head (cons value (looper (rest s) sec)))
                 (cons head (looper (rest s) sec)))
               (if head [head] []))))]
    (looper (rest col) (first col))))
There, the local recursive function is looper; for each element it tests whether the predicate is true, in which case it realizes two elements (adding the interleaved one), otherwise it realizes just one.
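Naming the anonymous function intercalate for the sake of the example, it behaves like this (the input is my recollection of one of the problem's test cases, so treat the exact data as approximate):

(intercalate < :less [1 6 7 4 3])
;=> (1 :less 6 :less 7 4 3)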
Also, you can avoid recursion using higher order functions
(fn [p v xs]
  (mapcat
   #(if (p %1 %2) [%1 v] [%1])
   xs
   (lazy-cat (rest xs) (take 1 xs))))
But as #noisesmith said in the comment, you're just calling a function that calls lazy-seq.

Insertion sort in clojure throws StackOverFlow error

(defn insert [s k]
  (let [spl (split-with #(< % k) s)]
    (concat (first spl) (list k) (last spl))))

(defn insert-sort [s]
  (reduce (fn [s k] (insert s k)) '() s))

(insert-sort (reverse (range 5000)))
throws a StackOverflowError. What am I doing wrong here?
Same issue as with Recursive function causing a stack overflow. Concat builds up a bunch of nested lazy sequences like (concat (concat (concat ...))) without doing any actual work, and then when you force the first element all the concats must get resolved at once, blowing the stack.
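A stripped-down illustration of the same effect (how many nested concats it takes to overflow depends on your JVM stack size):

;; builds a deeply nested chain of lazy concats without realizing anything...
(def nested (reduce concat (repeat 100000 [1])))
;; ...and forcing the first element resolves the whole chain at once
(first nested) ; likely throws StackOverflowError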
Your reduce creates a new list each time.
My implementation:
(defn- insert [el seq]
  (if (empty? seq) (cons el seq)
      (if (< el (first seq)) (cons el seq)
          (cons (first seq) (insert el (rest seq))))))

(defn insertion-sort
  ([seq sorted]
   (if (empty? seq) sorted
       (recur (rest seq) (insert (first seq) sorted))))
  ([seq]
   (insertion-sort seq nil)))
As the main answer suggests, the list concat is the offender. Calling doall with that list as input will result in an ISeq:
;; insertion sort helper
(defn insert [s k]
  ;; find the insert point
  (let [spl (split-with #(< % k) s)
        ret (concat (first spl) (list k) (last spl))]
    (doall ret)))

;; insertion sort
(defn insert-sort [s]
  (reduce (fn [s k] (insert s k)) '() s))
But wait... is the sequence still lazy?
The following hack of the above code, interestingly, indicates that the sequence is indeed still lazy!
;; insertion sort helper
(defn insert [s k]
  ;; find the insert point
  (let [spl (split-with #(< % k) s)
        ret (concat (first spl) (list k) (last spl))
        ret2 (doall ret)
        _ (println "final " (.getClass ret2))]
    ret2))

;; insertion sort
(defn insert-sort [s]
  (reduce (fn [s k] (insert s k)) '() s))
So, if the list is still lazy, then why does the use of doall fix anything?
The "doall" function is not guaranteed to return a "non-lazy" list; rather, it guarantees that the list which it DOES return will have been fully evaluated by a complete walk-through.
Thus, the essence of the problem is the pile-up of nested function calls; the laziness is certainly related to this aspect of the code in your original question, but it is not the "primary" source of the overflow.