Practice and reinforce the concepts from Lesson 5 by building audio-powered apps: a music player, a voice recorder, a soundboard, and an audio quiz game.
Time Limit: 5 minutes
# Create new audio-focused project
npx create-expo-app MusicApp --template blank
cd MusicApp
# Install audio dependencies
npx expo install expo-av
npx expo install expo-media-library
npx expo install @react-native-async-storage/async-storage
npx expo install expo-file-system
Time Limit: 5 minutes
Update app.json to declare the audio permissions the app needs:
{
"expo": {
"name": "Music App",
"slug": "music-app",
"version": "1.0.0",
"platforms": ["ios", "android"],
"ios": {
"infoPlist": {
"NSMicrophoneUsageDescription": "This app uses microphone to record audio.",
"NSMediaLibraryUsageDescription": "This app accesses media library to play music."
}
},
"android": {
"permissions": [
"RECORD_AUDIO",
"READ_EXTERNAL_STORAGE",
"WRITE_EXTERNAL_STORAGE"
]
}
}
}
✅ Checkpoint: App runs and permissions configured!
Replace App.js
with a functional music player:
import React, { useEffect, useRef, useState } from 'react';
import {
  Dimensions,
  Image,
  ScrollView,
  StyleSheet,
  Text,
  TouchableOpacity,
  View,
} from 'react-native';
import { Audio } from 'expo-av';
import { LinearGradient } from 'expo-linear-gradient';
import Slider from '@react-native-community/slider';
// Screen width captured once at module load; used to size the square artwork.
// NOTE(review): Dimensions.get does not track rotation — confirm the app is portrait-locked.
const { width } = Dimensions.get('window');
// Sample playlist with free music URLs
// `duration` is in milliseconds and is display-only; the real playback
// duration comes from the loaded sound's status updates.
// NOTE(review): both tracks point at the same demo WAV — confirm that is intended.
const SAMPLE_TRACKS = [
{
id: 1,
title: 'Chill Vibes',
artist: 'Sample Artist',
uri: 'https://www.soundjay.com/misc/sounds/bell-ringing-05.wav',
duration: 30000,
artwork: 'https://via.placeholder.com/300x300/667eea/white?text=Track+1',
},
{
id: 2,
title: 'Upbeat Energy',
artist: 'Demo Band',
uri: 'https://www.soundjay.com/misc/sounds/bell-ringing-05.wav',
duration: 45000,
artwork: 'https://via.placeholder.com/300x300/764ba2/white?text=Track+2',
},
];
export default function App() {
const [sound, setSound] = useState(null);
const [isPlaying, setIsPlaying] = useState(false);
const [currentTrack, setCurrentTrack] = useState(0);
const [position, setPosition] = useState(0);
const [duration, setDuration] = useState(0);
const [isLoading, setIsLoading] = useState(false);
useEffect(() => {
return sound
? () => {
sound.unloadAsync();
}
: undefined;
}, [sound]);
const loadAudio = async (track) => {
try {
setIsLoading(true);
if (sound) {
await sound.unloadAsync();
}
const { sound: newSound } = await Audio.Sound.createAsync(
{ uri: track.uri },
{ shouldPlay: false }
);
setSound(newSound);
newSound.setOnPlaybackStatusUpdate((status) => {
if (status.isLoaded) {
setPosition(status.positionMillis || 0);
setDuration(status.durationMillis || 0);
setIsPlaying(status.isPlaying);
}
});
setIsLoading(false);
} catch (error) {
console.error('Error loading audio:', error);
setIsLoading(false);
}
};
const playPause = async () => {
if (!sound) {
await loadAudio(SAMPLE_TRACKS[currentTrack]);
return;
}
if (isPlaying) {
await sound.pauseAsync();
} else {
await sound.playAsync();
}
};
const skipTrack = async (direction) => {
const newTrackIndex = direction === 'next'
? (currentTrack + 1) % SAMPLE_TRACKS.length
: currentTrack === 0
? SAMPLE_TRACKS.length - 1
: currentTrack - 1;
setCurrentTrack(newTrackIndex);
await loadAudio(SAMPLE_TRACKS[newTrackIndex]);
};
const onSeek = async (value) => {
if (sound) {
const seekPosition = value * duration;
await sound.setPositionAsync(seekPosition);
}
};
const formatTime = (millis) => {
const minutes = Math.floor(millis / 60000);
const seconds = ((millis % 60000) / 1000).toFixed(0);
return `${minutes}:${seconds.padStart(2, '0')}`;
};
const track = SAMPLE_TRACKS[currentTrack];
return (
<LinearGradient
colors={['#667eea', '#764ba2']}
style={styles.container}
>
{/* Album Art */}
<View style={styles.artworkContainer}>
<Image source={{ uri: track.artwork }} style={styles.artwork} />
</View>
{/* Track Info */}
<View style={styles.trackInfo}>
<Text style={styles.title}>{track.title}</Text>
<Text style={styles.artist}>{track.artist}</Text>
</View>
{/* Progress Bar */}
<View style={styles.progressContainer}>
<Slider
style={styles.slider}
minimumValue={0}
maximumValue={1}
value={duration ? position / duration : 0}
onValueChange={onSeek}
minimumTrackTintColor="#ffffff"
maximumTrackTintColor="rgba(255,255,255,0.3)"
thumbStyle={styles.sliderThumb}
/>
<View style={styles.timeContainer}>
<Text style={styles.time}>{formatTime(position)}</Text>
<Text style={styles.time}>{formatTime(duration)}</Text>
</View>
</View>
{/* Controls */}
<View style={styles.controls}>
<TouchableOpacity
style={styles.controlButton}
onPress={() => skipTrack('prev')}
>
<Text style={styles.controlText}>⏮</Text>
</TouchableOpacity>
<TouchableOpacity
style={[styles.playButton, isLoading && styles.loading]}
onPress={playPause}
disabled={isLoading}
>
<Text style={styles.playText}>
{isLoading ? '⏳' : isPlaying ? '⏸' : '▶'}
</Text>
</TouchableOpacity>
<TouchableOpacity
style={styles.controlButton}
onPress={() => skipTrack('next')}
>
<Text style={styles.controlText}>⏭</Text>
</TouchableOpacity>
</View>
</LinearGradient>
);
}
// Styles for the music-player screen.
const styles = StyleSheet.create({
// Full-screen gradient background; children centered with side padding.
container: {
flex: 1,
justifyContent: 'center',
alignItems: 'center',
paddingHorizontal: 30,
},
// Drop shadow around the album art (shadow* on iOS, elevation on Android).
artworkContainer: {
shadowColor: '#000',
shadowOffset: {
width: 0,
height: 8,
},
shadowOpacity: 0.44,
shadowRadius: 10.32,
elevation: 16,
marginBottom: 40,
},
// Square artwork sized to 80% of the screen width.
artwork: {
width: width * 0.8,
height: width * 0.8,
borderRadius: 20,
},
trackInfo: {
alignItems: 'center',
marginBottom: 40,
},
title: {
fontSize: 24,
fontWeight: 'bold',
color: 'white',
textAlign: 'center',
},
artist: {
fontSize: 18,
color: 'rgba(255,255,255,0.8)',
marginTop: 5,
},
// Seek bar plus the elapsed/total time row beneath it.
progressContainer: {
width: '100%',
marginBottom: 40,
},
slider: {
width: '100%',
height: 40,
},
// NOTE(review): thumbStyle is not supported by recent versions of
// @react-native-community/slider — verify this has any effect.
sliderThumb: {
backgroundColor: 'white',
width: 20,
height: 20,
},
timeContainer: {
flexDirection: 'row',
justifyContent: 'space-between',
},
time: {
color: 'rgba(255,255,255,0.8)',
fontSize: 14,
},
// Previous / play-pause / next transport row.
controls: {
flexDirection: 'row',
alignItems: 'center',
justifyContent: 'space-around',
width: '60%',
},
controlButton: {
padding: 10,
},
controlText: {
color: 'white',
fontSize: 30,
},
// Circular 80x80 translucent play/pause button.
playButton: {
backgroundColor: 'rgba(255,255,255,0.2)',
borderRadius: 40,
width: 80,
height: 80,
justifyContent: 'center',
alignItems: 'center',
},
// Dims the play button while a track is loading (paired with disabled).
loading: {
opacity: 0.5,
},
playText: {
color: 'white',
fontSize: 36,
},
});
Add recording functionality:
// Add to your imports
// NOTE(review): FileSystem is imported but never used by the recording
// snippets below — verify it is actually needed before merging.
import * as FileSystem from 'expo-file-system';
// Add these states to your component
const [recording, setRecording] = useState(null);      // active Audio.Recording, or null
const [recordedURI, setRecordedURI] = useState(null);  // file URI of the last finished recording
const [isRecording, setIsRecording] = useState(false); // drives the Record/Stop button UI
// Add recording functions
/**
 * Begin a new microphone recording after obtaining permission.
 * BUGFIX: Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY was deprecated and
 * removed in recent expo-av releases; the supported preset lives on
 * Audio.RecordingOptionsPresets. Also guards against starting a second
 * recording while one is already active.
 */
const startRecording = async () => {
  try {
    if (recording) {
      // A recording is already in progress; ignore the extra request.
      return;
    }
    const permission = await Audio.requestPermissionsAsync();
    if (permission.status !== 'granted') {
      alert('Permission to record audio is required!');
      return;
    }
    // iOS only captures audio when allowsRecordingIOS is true.
    await Audio.setAudioModeAsync({
      allowsRecordingIOS: true,
      playsInSilentModeIOS: true,
    });
    const { recording: newRecording } = await Audio.Recording.createAsync(
      Audio.RecordingOptionsPresets.HIGH_QUALITY
    );
    setRecording(newRecording);
    setIsRecording(true);
  } catch (error) {
    console.error('Failed to start recording:', error);
  }
};
/**
 * Stop the active recording and remember its file URI for playback.
 * BUGFIX: guards against being called with no active recording (the old
 * version crashed on recording.stopAndUnloadAsync), and restores a
 * playback-oriented audio mode — leaving allowsRecordingIOS enabled routes
 * subsequent playback through the quiet earpiece on iOS.
 */
const stopRecording = async () => {
  if (!recording) {
    return;
  }
  try {
    setIsRecording(false);
    await recording.stopAndUnloadAsync();
    await Audio.setAudioModeAsync({ allowsRecordingIOS: false });
    const uri = recording.getURI();
    setRecordedURI(uri);
    setRecording(null);
  } catch (error) {
    console.error('Failed to stop recording:', error);
  }
};
/**
 * Play back the most recent recording, if one exists.
 * BUGFIX: the createAsync call had no error handling, so a missing or
 * corrupt file produced an unhandled promise rejection.
 */
const playRecording = async () => {
  if (!recordedURI) {
    return;
  }
  try {
    const { sound: recordedSound } = await Audio.Sound.createAsync(
      { uri: recordedURI },
      { shouldPlay: true }
    );
    // Replacing `sound` lets the existing unload-on-change effect clean up
    // the previous player.
    setSound(recordedSound);
  } catch (error) {
    console.error('Failed to play recording:', error);
  }
};
// Add recording controls to your JSX
{/* Recording Section */}
{/* One button toggles start/stop; once a clip exists, a playback row appears. */}
<View style={styles.recordingSection}>
<TouchableOpacity
style={[styles.recordButton, isRecording && styles.recording]}
onPress={isRecording ? stopRecording : startRecording}
>
<Text style={styles.recordText}>
{isRecording ? '⏹ Stop' : '🎤 Record'}
</Text>
</TouchableOpacity>
{/* Only rendered after stopRecording has stored a URI. */}
{recordedURI && (
<TouchableOpacity style={styles.playRecordingButton} onPress={playRecording}>
<Text style={styles.controlText}>▶ Play Recording</Text>
</TouchableOpacity>
)}
</View>
Create a simple audio visualizer:
// Add animated bars for visualization
const [audioLevels, setAudioLevels] = useState(Array(10).fill(0));
// Simulate audio levels (in a real app, you'd get these from audio analysis)
const updateVisualization = () => {
  setAudioLevels(prev =>
    prev.map(() => Math.random() * 100)
  );
};
useEffect(() => {
  // BUGFIX: only run the timer while audio is playing. The old version kept
  // a 100 ms interval alive even when paused (the callback just no-opped),
  // wasting a wakeup ten times a second.
  if (!isPlaying) {
    return undefined;
  }
  const interval = setInterval(updateVisualization, 100);
  return () => clearInterval(interval);
}, [isPlaying]);
// Add visualizer component
const Visualizer = ({ levels }) => (
<View style={styles.visualizer}>
{levels.map((level, index) => (
<View
key={index}
style={[
styles.visualizerBar,
{ height: Math.max(4, level / 2) }
]}
/>
))}
</View>
);
// Add to your JSX
<Visualizer levels={audioLevels} />
// Add styles
// Horizontal row of bottom-aligned bars; each bar's height is set inline
// per render by the Visualizer component.
visualizer: {
flexDirection: 'row',
alignItems: 'flex-end',
height: 60,
marginVertical: 20,
justifyContent: 'space-around',
width: '80%',
},
visualizerBar: {
width: 4,
backgroundColor: 'white',
borderRadius: 2,
minHeight: 4,
},
Create beautiful, responsive audio controls:
// Animated play button
const AnimatedPlayButton = ({ isPlaying, onPress }) => {
return (
<TouchableOpacity
style={styles.animatedPlayButton}
onPress={onPress}
>
<LinearGradient
colors={['#ff6b6b', '#ee5a52']}
style={styles.playButtonGradient}
>
<Text style={styles.playButtonText}>
{isPlaying ? '⏸' : '▶'}
</Text>
</LinearGradient>
</TouchableOpacity>
);
};
// Custom volume control
const VolumeControl = ({ volume, onVolumeChange }) => (
<View style={styles.volumeContainer}>
<Text style={styles.volumeIcon}>🔈</Text>
<Slider
style={styles.volumeSlider}
minimumValue={0}
maximumValue={1}
value={volume}
onValueChange={onVolumeChange}
minimumTrackTintColor="#fff"
maximumTrackTintColor="rgba(255,255,255,0.3)"
/>
<Text style={styles.volumeIcon}>🔊</Text>
</View>
);
Build a scrollable playlist:
const PlaylistView = ({ tracks, currentTrack, onTrackSelect }) => (
<ScrollView style={styles.playlist}>
{tracks.map((track, index) => (
<TouchableOpacity
key={track.id}
style={[
styles.trackItem,
index === currentTrack && styles.currentTrackItem
]}
onPress={() => onTrackSelect(index)}
>
<Image source={{ uri: track.artwork }} style={styles.trackThumbnail} />
<View style={styles.trackDetails}>
<Text style={styles.trackTitle}>{track.title}</Text>
<Text style={styles.trackArtist}>{track.artist}</Text>
</View>
<Text style={styles.trackDuration}>
{formatTime(track.duration)}
</Text>
</TouchableOpacity>
))}
</ScrollView>
);
// Add audio effects (pitch, speed, reverb)
const [playbackRate, setPlaybackRate] = useState(1.0);
const [pitch, setPitch] = useState(1.0);
/**
 * Apply the current playback rate to the loaded sound.
 * BUGFIX: expo-av's signature is
 * setRateAsync(rate, shouldCorrectPitch, pitchCorrectionQuality) — the third
 * argument is a PitchCorrectionQuality enum, NOT a pitch multiplier, so
 * passing the `pitch` state there was invalid. expo-av cannot shift pitch
 * independently of rate; the pitch slider state is kept for the UI, but the
 * call now passes a valid quality enum.
 * NOTE(review): true pitch shifting needs a different library — confirm
 * whether the pitch slider should be removed or repurposed.
 */
const applyAudioEffects = async () => {
  if (sound) {
    await sound.setRateAsync(playbackRate, true, Audio.PitchCorrectionQuality.High);
  }
};
// Effects controls
const AudioEffects = () => (
<View style={styles.effectsPanel}>
<Text style={styles.effectLabel}>Speed: {playbackRate.toFixed(1)}x</Text>
<Slider
style={styles.effectSlider}
minimumValue={0.5}
maximumValue={2.0}
value={playbackRate}
onValueChange={setPlaybackRate}
onSlidingComplete={applyAudioEffects}
/>
<Text style={styles.effectLabel}>Pitch: {pitch.toFixed(1)}</Text>
<Slider
style={styles.effectSlider}
minimumValue={0.5}
maximumValue={2.0}
value={pitch}
onValueChange={setPitch}
onSlidingComplete={applyAudioEffects}
/>
</View>
);
// Enable background audio playback
// NOTE(review): Audio is already imported at the top of App.js — drop this
// duplicate import when merging the snippet into the same file.
import { Audio } from 'expo-av';
// Configures the global audio session so playback continues in the
// background, plays despite the iOS silent switch, and ducks under other
// Android audio instead of stopping it.
const setupBackgroundAudio = async () => {
try {
await Audio.setAudioModeAsync({
allowsRecordingIOS: false,
staysActiveInBackground: true,
// NOTE(review): the INTERRUPTION_MODE_* constants are deprecated/removed in
// newer expo-av releases in favor of the InterruptionModeIOS /
// InterruptionModeAndroid enums — confirm against the installed SDK version.
interruptionModeIOS: Audio.INTERRUPTION_MODE_IOS_DO_NOT_MIX,
playsInSilentModeIOS: true,
shouldDuckAndroid: true,
interruptionModeAndroid: Audio.INTERRUPTION_MODE_ANDROID_DO_NOT_MIX,
playThroughEarpieceAndroid: false,
});
} catch (error) {
console.error('Error setting up background audio:', error);
}
};
// Call in useEffect
useEffect(() => {
setupBackgroundAudio();
}, []);
// Stream audio from URL
// Creates a sound that starts playing as soon as enough data is buffered;
// the status callback receives progress updates for the stream.
const streamAudio = async (url) => {
  const onStatus = (status) => {
    if (status.isLoaded) {
      // Handle streaming status
      console.log('Streaming:', status.positionMillis);
    }
  };
  try {
    const created = await Audio.Sound.createAsync(
      { uri: url },
      { shouldPlay: true },
      onStatus
    );
    setSound(created.sound);
  } catch (error) {
    console.error('Streaming error:', error);
  }
};
// Buffering indicator
const [isBuffering, setIsBuffering] = useState(false);
// Add to audio status update
// Mirrors the stream's buffering flag into state so the UI can show a spinner.
// NOTE(review): this fragment belongs inside the setOnPlaybackStatusUpdate
// callback, where `status` is in scope.
if (status.isBuffering) {
setIsBuffering(true);
} else {
setIsBuffering(false);
}
Create a grid of sound effects:
// One entry per soundboard button: label, emoji icon, and audio source.
// NOTE(review): the uri values are bare filenames — confirm they should be
// replaced with require('./assets/...') modules or full remote URLs.
const SOUND_EFFECTS = [
{ id: 1, name: 'Applause', emoji: '👏', uri: 'applause.wav' },
{ id: 2, name: 'Drum', emoji: '🥁', uri: 'drum.wav' },
{ id: 3, name: 'Bell', emoji: '🔔', uri: 'bell.wav' },
{ id: 4, name: 'Horn', emoji: '📢', uri: 'horn.wav' },
];
const SoundBoard = () => (
<View style={styles.soundBoard}>
{SOUND_EFFECTS.map(effect => (
<TouchableOpacity
key={effect.id}
style={styles.soundButton}
onPress={() => playSound(effect.uri)}
>
<Text style={styles.soundEmoji}>{effect.emoji}</Text>
<Text style={styles.soundName}>{effect.name}</Text>
</TouchableOpacity>
))}
</View>
);
Build a "guess the sound" game:
const AudioQuiz = () => {
const [currentQuestion, setCurrentQuestion] = useState(0);
const [score, setScore] = useState(0);
const [showAnswer, setShowAnswer] = useState(false);
const questions = [
{ audio: 'dog-bark.wav', options: ['Dog', 'Cat', 'Bird'], answer: 0 },
{ audio: 'rain.wav', options: ['Rain', 'Fire', 'Wind'], answer: 0 },
];
const playQuestionAudio = () => {
const question = questions[currentQuestion];
playSound(question.audio);
};
const selectAnswer = (answerIndex) => {
const question = questions[currentQuestion];
if (answerIndex === question.answer) {
setScore(prev => prev + 1);
}
setShowAnswer(true);
};
return (
<View style={styles.quizContainer}>
<Text style={styles.quizTitle}>Audio Quiz</Text>
<Text style={styles.score}>Score: {score}</Text>
<TouchableOpacity style={styles.playButton} onPress={playQuestionAudio}>
<Text style={styles.playButtonText}>Play Sound</Text>
</TouchableOpacity>
{questions[currentQuestion].options.map((option, index) => (
<TouchableOpacity
key={index}
style={styles.optionButton}
onPress={() => selectAnswer(index)}
disabled={showAnswer}
>
<Text style={styles.optionText}>{option}</Text>
</TouchableOpacity>
))}
</View>
);
};
// Proper cleanup
// BUGFIX: the original unmount effect (empty dependency array) captured the
// FIRST render's cleanupAudio, whose `sound`/`recording` closures were still
// null — so nothing was ever released on unmount. Refs mirror the latest
// objects so the unmount handler sees them. Each unload is also wrapped in
// its own try/catch so one failure doesn't skip the other.
const soundRef = useRef(null);
soundRef.current = sound;
const recordingRef = useRef(null);
recordingRef.current = recording;
// Release all native audio resources (player and recorder).
const cleanupAudio = async () => {
  const activeSound = soundRef.current;
  const activeRecording = recordingRef.current;
  if (activeSound) {
    try {
      await activeSound.unloadAsync();
    } catch (error) {
      console.error('Failed to unload sound:', error);
    }
    setSound(null);
  }
  if (activeRecording) {
    try {
      await activeRecording.stopAndUnloadAsync();
    } catch (error) {
      console.error('Failed to stop recording:', error);
    }
    setRecording(null);
  }
};
// Use in cleanup
useEffect(() => {
  return () => {
    // Fire-and-forget: effect cleanup functions cannot await async work.
    void cleanupAudio();
  };
}, []);
// Cache frequently used sounds
// Maps uri -> Promise<Audio.Sound>.
// BUGFIX: caching the *promise* (not the resolved sound) means concurrent
// callers for the same uri share one in-flight load; the old version let
// simultaneous calls each create a native player and race the cache entry.
const audioCache = new Map();
const getCachedSound = async (uri) => {
  if (audioCache.has(uri)) {
    return audioCache.get(uri);
  }
  const loadPromise = Audio.Sound.createAsync({ uri }).then(({ sound }) => sound);
  // Evict failed loads so a transient error isn't cached forever.
  loadPromise.catch(() => audioCache.delete(uri));
  audioCache.set(uri, loadPromise);
  return loadPromise;
};
Audio App Successfully Built If: both sample tracks load and play with working seek and skip controls, recording and playback of recordings work on a physical device, and the visualizer, soundboard, and quiz screens render without errors.
Time Investment: 60 minutes total Difficulty Level: Intermediate to Advanced Prerequisites: React Native basics, understanding of audio concepts Tools Needed: Physical device for testing (audio doesn't work in simulator)