@dennda
Created October 11, 2018 15:06
diff --git a/DuolingoMobile/Sources/Explanations/ExplanationCells.swift b/DuolingoMobile/Sources/Explanations/ExplanationCells.swift
index 3ad70f9b0b..ad5af4c56e 100644
--- a/DuolingoMobile/Sources/Explanations/ExplanationCells.swift
+++ b/DuolingoMobile/Sources/Explanations/ExplanationCells.swift
@@ -609,23 +609,23 @@ private class PhonemeView: UIControl {
}
/// Called when the control is tapped.
- var handleTap: (() -> Void)?
+// var handleTap: (() -> Void)?
/// The label displaying the textual representation of the phoneme.
private let label = UILabel()
/// The speaker icon indicating that the control can be tapped to listen.
- private let speakerIcon = PDFView()
+ private let speakerView: UIView
- init() {
- super.init(frame: .zero)
+ init(speakerView: UIView) {
+ self.speakerView = speakerView
- speakerIcon.resourceName = "speaker-active"
+ super.init(frame: .zero)
- addSubview(speakerIcon)
+ addSubview(speakerView)
addSubview(label)
- label.font = duoStyle.strongFont
+ label.font = isJuicy ? UIFont.standard(size: 100, weight: .bold) : duoStyle.strongFont
label.textAlignment = .center
label.baselineAdjustment = .alignCenters
@@ -638,24 +638,32 @@ private class PhonemeView: UIControl {
backgroundColor = .white
// Add a shadow
- layer.shadowColor = UIColor.black.cgColor
- layer.shadowOpacity = 0.1
- layer.shadowRadius = 7.0
- layer.shadowOffset = CGSize(width: 0, height: 1)
- layer.rasterizationScale = UIScreen.main.scale
- layer.shouldRasterize = true
- layer.cornerRadius = 8.0
- clipsToBounds = false
- addTarget(self, action: #selector(PhonemeView.touchUpInside(_:)), for: .touchUpInside)
+ if isJuicy {
+ isOpaque = false
+ label.backgroundColor = .clear
+ label.isOpaque = false
+ } else {
+ layer.shadowColor = UIColor.black.cgColor
+ layer.shadowOpacity = 0.1
+ layer.shadowRadius = 7.0
+ layer.shadowOffset = CGSize(width: 0, height: 1)
+ layer.rasterizationScale = UIScreen.main.scale
+ layer.shouldRasterize = true
+ layer.cornerRadius = 8.0
+ clipsToBounds = false
+ }
+// addTarget(self, action: #selector(PhonemeView.touchUpInside(_:)), for: .touchUpInside)
}
- @objc func touchUpInside(_ sender: UIControl) {
- handleTap?()
- }
+// @objc func touchUpInside(_ sender: UIControl) {
+// handleTap?()
+// }
override var isHighlighted: Bool {
didSet {
- backgroundColor = isHighlighted ? Color.cloud.light : Color.creme.light
+ if !isJuicy {
+ backgroundColor = isHighlighted ? Color.cloud.light : Color.creme.light
+ }
}
}
@@ -669,8 +677,9 @@ private class PhonemeView: UIControl {
let speakerDimension = layoutMetrics.phonemeElementSpeakerDimension
let speakerInset = layoutMetrics.phonemeElementSpeakerInset
let speakerX = bounds.width - (speakerDimension + speakerInset)
- speakerIcon.frame = CGRect(x: speakerX,
- y: speakerInset,
+ let speakerY = bounds.height - (speakerDimension + speakerInset)
+ speakerView.frame = CGRect(x: speakerX,
+ y: speakerY,
width: speakerDimension,
height: speakerDimension).integral
@@ -701,6 +710,8 @@ class AudioSampleCell: ExplanationCell, UIGestureRecognizerDelegate {
phonemeView.text = model.sampleText.toAttributedString(style: styledStringBaseStyle)
+ if isJuicy
+
label.model = HintLabelModel(text: attributedString(fromTextElement: model.description),
hints: model.description.hints)
@@ -708,10 +719,28 @@ class AudioSampleCell: ExplanationCell, UIGestureRecognizerDelegate {
}
}
+ private let speaker: Speaker
+
@objc override init(frame: CGRect) {
+ var _phonemeView: PhonemeView?
+ let speaker = Speaker(speed: .normal, embedSpeaker: { speakerView in
+ let phonemeView = PhonemeView(speakerView: speakerView)
+ phonemeView.backgroundColor = UIColor.clear
+ _phonemeView = phonemeView
+ let button = JuicyButton.primaryButton(customView: phonemeView,
+ customViewLayout: .fill(customInsets: .zero),
+ cornerRadius: 24)
+ return button
+ })
+ self.speaker = speaker
+ guard let phonemeView = _phonemeView else {
+ fatalError("Should have gotten a phoneme view.")
+ }
+ self.phonemeView = phonemeView
+
super.init(frame: frame)
- contentView.addSubview(phonemeView)
+ contentView.addSubview(speaker.button)
contentView.addSubview(label)
// We install the recognizer here rather than in the TappableLabel so we can get tighter control and inform our
@@ -720,7 +749,7 @@ class AudioSampleCell: ExplanationCell, UIGestureRecognizerDelegate {
tapRecognizer.delegate = self
contentView.addGestureRecognizer(tapRecognizer)
- phonemeView.handleTap = { [weak self] in
+ speaker.button.tapHandler = { [weak self] in
guard let model = self?.currentModel else {
return
}
@@ -730,7 +759,7 @@ class AudioSampleCell: ExplanationCell, UIGestureRecognizerDelegate {
func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldReceive touch: UITouch) -> Bool {
let point = touch.location(in: contentView)
- if phonemeView.point(inside: point, with: nil) {
+ if speaker.button.point(inside: point, with: nil) {
return false
}
return true
@@ -777,7 +806,7 @@ class AudioSampleCell: ExplanationCell, UIGestureRecognizerDelegate {
let labelFrame = CGRect(x: labelX, y: labelY, width: labelWidth, height: labelHeight)
label.sizeToFit()
- phonemeView.frame = tokenFrame
+ speaker.button.frame = tokenFrame
label.frame = labelFrame
}
@@ -787,7 +816,9 @@ class AudioSampleCell: ExplanationCell, UIGestureRecognizerDelegate {
typealias Model = AudioSampleElementModel
- private let phonemeView = PhonemeView()
+// private let phonemeView = PhonemeView()
+
+ private let phonemeView: PhonemeView
private let label = makeLabel()
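
Note on the AudioSampleCell change above: the cell needs a reference to the PhonemeView that is only ever created inside the embedSpeaker closure, so init(frame:) writes it into a local optional (_phonemeView) from within the closure and then promotes it to the stored property with a guard before calling super.init. Speaker, JuicyButton.primaryButton(customView:customViewLayout:cornerRadius:) and PhonemeView are Duolingo-internal APIs not shown in this gist, so the following is only a minimal, self-contained sketch of that capture-from-a-factory-closure pattern, with placeholder Wrapper, InnerView, and Cell types standing in for them.

import UIKit

/// Stand-in for the view that gets injected into the inner control.
final class InnerView: UIView {}

/// Stand-in for the wrapper object that owns the button and asks the caller
/// to embed a freshly created icon view into whatever control it returns.
final class Wrapper {
    let button: UIButton

    /// `embed` receives the icon view and must return the control that will
    /// end up in the owner's view hierarchy. It runs synchronously here.
    init(embed: (UIView) -> UIButton) {
        button = embed(UIView())
    }
}

final class Cell: UIView {
    private let wrapper: Wrapper
    private let inner: InnerView

    override init(frame: CGRect) {
        // The inner view only exists inside the closure, so capture it in a
        // local optional and promote it to a stored property afterwards.
        // All stored properties must be set before super.init(frame:).
        var capturedInner: InnerView?
        let wrapper = Wrapper { iconView in
            let inner = InnerView()
            inner.addSubview(iconView)
            capturedInner = inner
            let button = UIButton(type: .system)
            button.addSubview(inner)
            return button
        }
        self.wrapper = wrapper
        guard let inner = capturedInner else {
            // The closure runs synchronously inside Wrapper.init, so this is
            // effectively unreachable; failing loudly mirrors the diff above.
            fatalError("Expected the embed closure to run during Wrapper.init")
        }
        self.inner = inner

        super.init(frame: frame)
        addSubview(wrapper.button)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}

The pattern works because the embed closure runs synchronously inside the wrapper's initializer, so the captured optional is guaranteed to be set by the time the guard executes. Tap handling then moves to the wrapping button (speaker.button.tapHandler in the diff), while the cell's own tap recognizer declines touches that land on the button by checking point(inside:with:) in gestureRecognizer(_:shouldReceive:).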