Check out my blog post!
There have already been a few attempts (here and here) to automate video creation with fastlane snapshot. However, Felix definitely does not want an HTTP server in his code base :)
The following step-by-step guide shows an alternative solution. The outline corresponds to the commit history.
Just create a Single View Application in Xcode without any tests.
cd to your project folder and run fastlane init. When fastlane asks what you would like to use it for, press "1". Do exactly what fastlane tells you afterwards. After finishing the instructions, open the newly created Snapfile in your project's fastlane folder. Uncomment at least one device and one language. Make sure snapshot("0Launch") is called in one of your UITests. Run fastlane snapshot in your project folder to verify that everything is working fine.
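After uncommenting, the relevant part of your Snapfile might look roughly like this (the device and language here are just examples; the generated file contains more options):

devices([
  "iPhone 8"
])

languages([
  "en-US"
])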
To test the video recording feature we will add later, we need something to record. Therefore, just add a button to your first ViewController and add a second ViewController with a distinguishable background. Push the second ViewController when the button is tapped. Also set the button's accessibility identifier to "button".
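If you prefer setting this up in code rather than in the storyboard, a minimal sketch could look like the following. It assumes the first ViewController is embedded in a UINavigationController; the button title, frame, and background color are arbitrary choices:

import UIKit

class ViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
        let button = UIButton(type: .system)
        button.setTitle("Push", for: .normal)
        button.accessibilityIdentifier = "button" // identifier the UITest will look up
        button.addTarget(self, action: #selector(pushSecondViewController), for: .touchUpInside)
        button.frame = CGRect(x: 100, y: 100, width: 120, height: 44)
        view.addSubview(button)
    }

    @objc func pushSecondViewController() {
        let secondViewController = UIViewController()
        secondViewController.view.backgroundColor = .red // distinguishable background
        navigationController?.pushViewController(secondViewController, animated: true)
    }
}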
Now that your storyboard is set up, let's add the video-related code to the SnapshotHelper file. To keep the original snapshot logic intact, add the following two functions at the SnapshotHelper file scope.
func snaptake(_ name: String, waitForLoadingIndicator: Bool, plot: () -> ()) {
    if waitForLoadingIndicator {
        Snapshot.snaptake(name, plot: plot)
    } else {
        Snapshot.snaptake(name, timeWaitingForIdle: 0, plot: plot)
    }
}
/// - Parameters:
///   - name: The name of the snaptake
///   - timeout: Amount of seconds to wait until the network loading indicator disappears. Pass `0` if you don't want to wait.
///   - plot: Plot which should be recorded.
func snaptake(_ name: String, timeWaitingForIdle timeout: TimeInterval = 20, plot: () -> ()) {
    Snapshot.snaptake(name, timeWaitingForIdle: timeout, plot: plot)
}
These two functions are pretty similar to the already existing snapshot functions. The only difference lies in the additional argument plot: () -> (), a closure with no parameters and no return value. plot contains all the interface interactions you want to record. You will see how to use it later.
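As a quick preview, a hypothetical call could look like this; everything inside the closure becomes part of the recording (the name "example" and the "button" identifier are just the ones from our sample setup):

snaptake("example") {
    XCUIApplication().buttons["button"].tap()
}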
Within your Snapshot class, add the actual recording logic. snaptake takes plot as an argument and successively calls snaptakeStart(), snaptakeSetTrimmingFlag(), plot(), and snaptakeStop().
open class func snaptake(_ name: String, timeWaitingForIdle timeout: TimeInterval = 20, plot: () -> ()) {
    guard let recordingFlagPath = snaptakeStart(name, timeWaitingForIdle: timeout) else { return }
    snaptakeSetTrimmingFlag()
    plot()
    snaptakeStop(recordingFlagPath)
}
Within snaptakeStart, a recordingFlag is saved to your hard drive. This recordingFlag contains the path of the video that will be recorded later. The saving of this recordingFlag is watched from outside of Xcode to start the actual recording process. You will see how this works later.
class func snaptakeStart(_ name: String, timeWaitingForIdle timeout: TimeInterval = 20) -> URL? {
    if timeout > 0 {
        waitForLoadingIndicatorToDisappear(within: timeout)
    }
    print("snaptake: \(name)")
    sleep(1) // Waiting for the animation to be finished (kind of)
    #if os(OSX)
        XCUIApplication().typeKey(XCUIKeyboardKeySecondaryFn, modifierFlags: [])
        return nil
    #else
        guard let simulator = ProcessInfo().environment["SIMULATOR_DEVICE_NAME"], let screenshotsDir = screenshotsDirectory else { return nil }
        let path = "screenshots/\(locale)/\(simulator)-\(name).mp4"
        let recordingFlagPath = screenshotsDir.appendingPathComponent("recordingFlag.txt")
        do {
            try path.write(to: recordingFlagPath, atomically: false, encoding: .utf8)
        } catch let error {
            print("Problem setting recording flag: \(recordingFlagPath)")
            print(error)
        }
        return recordingFlagPath
    #endif
}
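For example, assuming an en-US locale and an iPhone 8 simulator, calling snaptake("testExample") would write the single line screenshots/en-US/iPhone 8-testExample.mp4 into recordingFlag.txt.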
There is a pretty annoying bug when recording videos via the console: the first few frames appear black until something happens within your application. That's why we are going to rotate the device and save the related duration in snaptakeSetTrimmingFlag. Later we will trim the recorded video accordingly.
class func snaptakeSetTrimmingFlag() {
    let start = Date()
    sleep(2)
    XCUIDevice.shared.orientation = .landscapeLeft
    sleep(2)
    XCUIDevice.shared.orientation = .portrait
    let trimmingTime = -start.timeIntervalSinceNow - 2
    let hours = Int(trimmingTime) / 3600
    let minutes = (Int(trimmingTime) / 60) % 60
    let seconds = Int(trimmingTime) % 60
    let milliseconds = Int((trimmingTime - Double(Int(trimmingTime))) * 1000)
    let trimmingTimeString = String(format: "%02i:%02i:%02i.%03i", hours, minutes, seconds, milliseconds)
    #if os(OSX)
        XCUIApplication().typeKey(XCUIKeyboardKeySecondaryFn, modifierFlags: [])
    #else
        guard let screenshotsDir = screenshotsDirectory else { return }
        let trimmingFlagPath = screenshotsDir.appendingPathComponent("trimmingFlag.txt")
        do {
            try trimmingTimeString.write(to: trimmingFlagPath, atomically: false, encoding: .utf8)
        } catch let error {
            print("Problem setting trimming flag: \(trimmingFlagPath)")
            print(error)
        }
    #endif
}
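To make the time format concrete: if the two rotations take, say, 4.35 seconds in total, trimmingTime ends up at 2.35 seconds and trimmingTimeString becomes 00:00:02.350, which is exactly the HH:MM:SS.mmm format that ffmpeg's -ss option expects later on.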
After we have called plot in snaptake, we finally stop the recording in snaptakeStop. We do so by removing the recordingFlag we added earlier in snaptakeStart.
class func snaptakeStop(_ recordingFlagPath: URL) {
    let fileManager = FileManager.default
    do {
        try fileManager.removeItem(at: recordingFlagPath)
    } catch let error {
        print("Problem removing recording flag: \(recordingFlagPath)")
        print(error)
    }
}
Finally, add the following test function to the SnaptakeUITests file. The function contains our plot, in which the button is simply tapped.
func testExample() {
    snaptake("testExample") {
        XCUIApplication().buttons["button"].tap()
    }
}
After your UITests are fully set up, we need to add the related logic outside of Xcode. Within the Gemfile in your fastlane folder, add gem "listen".
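The Gemfile could then look roughly like this (a sketch; your generated file may differ):

source "https://rubygems.org"

gem "fastlane"
gem "listen"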
Within your Snapfile remove output_directory("./screenshots"). Now we are ready to create a videos lane in your Fastfile. The videos lane is more or less self-explanatory. The most relevant part is the recordingListener. Within its handlers, the video recording process is started and stopped when the recordingFlag is added or removed. When recording is stopped, the trimming time for the resulting video is read from our trimmingFlag and stored in trimming_time_dictionary. sh("cd .. && fastlane snapshot --concurrent_simulators false && cd fastlane") builds Snaptake and runs the SnaptakeUITests, so our recordingListener can actually be triggered. After the videos have been recorded, they are trimmed and reencoded.
desc "Generate new localized videos"
lane :videos do |options|
### RECORDING VIDEOS
# Delete all existing videos
mp4_file_paths = Find.find('screenshots').select { |p| /.*\.mp4$/ =~ p}
for mp4_file_path in mp4_file_paths
File.delete(mp4_file_path)
end
# Ensure that caching folder for screenshots and recording flags exists
Dir.mkdir(File.expand_path('~/Library/Caches/tools.fastlane/screenshots')) unless Dir.exist?(File.expand_path('~/Library/Caches/tools.fastlane/screenshots'))
# Setup listeners for starting and ending recording
fastlane_require 'listen'
path = nil
process = nil
trimming_time_dictionary = {}
recordingListener = Listen.to(File.expand_path('~/Library/Caches/tools.fastlane/screenshots'), only: /\.txt$/) do |modified, added, removed|
if (!added.empty?) && File.basename(added.first) == 'recordingFlag.txt'
recording_flag_path = added.first
path = File.read(recording_flag_path)
process = IO.popen("xcrun simctl io booted recordVideo '#{path}'") # Start recording of current simulator to path determined in recordingFlag.txt
end
if (!removed.empty?) && File.basename(removed.first) == 'recordingFlag.txt'
pid = process.pid
Process.kill("INT", pid) # Stop recording by killing process with id pid
trimming_flag_path = File.expand_path('~/Library/Caches/tools.fastlane/screenshots/trimmingFlag.txt')
trimming_time = File.read(trimming_flag_path)
trimming_time_dictionary[path] = trimming_time # Storing trimming time determined in trimmingFlag.txt for recorded video (necessary due to initial black simulator screen after starting recording)
end
end
# Build SnaptakeUITests and Snaptake and run UITests
recordingListener.start
sh("cd .. && fastlane snapshot --concurrent_simulators false && cd fastlane")
recordingListener.stop
### EDIT VIDEOS
sleep(3)
# Trim videos and reencode
mp4_file_paths = Find.find('screenshots').select { |p| /.*\.mp4$/ =~ p}
for mp4_file_path in mp4_file_paths
trimmed_path = mp4_file_path.chomp('.mp4') + '-trimmed.mp4'
trimming_time = trimming_time_dictionary[mp4_file_path]
sh("ffmpeg -ss '#{trimming_time}' -i '#{mp4_file_path}' -c:v copy -r 30 '#{trimmed_path}'") # Trimming the Beginning of the Videos
File.delete(mp4_file_path)
final_path = trimmed_path.chomp('-trimmed.mp4') + '-final.mp4'
sh("ffmpeg -i '#{trimmed_path}' -ar 44100 -ab 256k -r 30 -crf 22 -profile:v main -pix_fmt yuv420p -y -max_muxing_queue_size 1000 '#{final_path}'")
File.delete(trimmed_path)
end
end
By calling fastlane videos we finally create our test video.
Do you want to know what is possible with this procedure? Check out Bonprix with your iPhone in the (e.g. German) App Store! Do you want to solve similarly exciting technical questions? Join us at apploft!